author | Patrick Simianer <simianer@cl.uni-heidelberg.de> | 2013-09-17 21:18:27 +0200
committer | Patrick Simianer <simianer@cl.uni-heidelberg.de> | 2013-09-17 21:18:27 +0200
commit | b2891df4bc4429fbeec503279fd19e7fafe04a24 (patch)
tree | c9fdfadaeffc2258d9100a6c01f0cc15d81a2a1b /training
parent | 451d0c7e865cdea9da6a0fb747782886b49eeeef (diff)
separate inis for shards
Diffstat (limited to 'training')
-rw-r--r-- | training/dtrain/dtrain.cc | 2
-rwxr-xr-x | training/dtrain/parallelize.rb | 13
2 files changed, 13 insertions, 2 deletions
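In the dtrain.cc hunk below, the per-coordinate learning-rate (pclr) update is switched from an eta-damped step to a plain running-average step over the number of updates a feature has received. Restated as math, with symbol names of my own choosing rather than from the source (lambda_k for lambdas[k], u_k for sum_up[k], n_k for learning_rates[k], eta for the global learning rate):

```latex
% Symbol names are mine, not from the source:
%   \lambda_k = lambdas[k], \; u_k = sum\_up[k], \; n_k = learning\_rates[k], \; \eta = eta
% Before this commit (counter incremented afterwards):
\lambda_k \mathrel{+}= u_k \cdot \max\left(10^{-8},\ \frac{\eta}{\eta + n_k}\right), \qquad n_k \mathrel{+}= 1
% After this commit (counter incremented first, then a 1/n_k step):
n_k \mathrel{+}= 1, \qquad \lambda_k \mathrel{+}= \frac{u_k}{n_k}
```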
diff --git a/training/dtrain/dtrain.cc b/training/dtrain/dtrain.cc
index 2d090666..4521e794 100644
--- a/training/dtrain/dtrain.cc
+++ b/training/dtrain/dtrain.cc
@@ -406,8 +406,8 @@ main(int argc, char** argv)
       if (pclr) {
         SparseVector<weight_t>::iterator it = sum_up.begin();
         for (; it != lambdas.end(); ++it) {
-          lambdas[it->first] += it->second * max(0.00000001, eta/(eta+learning_rates[it->first]));
           learning_rates[it->first]++;
+          lambdas[it->first] += it->second / learning_rates[it->first]; //* max(0.00000001, eta/(eta+learning_rates[it->first]));
         }
       }
diff --git a/training/dtrain/parallelize.rb b/training/dtrain/parallelize.rb
index 285f3c9b..66a61b3d 100755
--- a/training/dtrain/parallelize.rb
+++ b/training/dtrain/parallelize.rb
@@ -21,6 +21,7 @@ opts = Trollop::options do
   opt :qsub, "use qsub", :type => :bool, :default => false
   opt :dtrain_binary, "path to dtrain binary", :type => :string
   opt :extra_qsub, "extra qsub args", :type => :string, :default => ""
+  opt :per_shard_decoder_configs, "give special decoder config per shard", :type => string
 end
 usage if not opts[:config]&&opts[:shards]&&opts[:input]&&opts[:references]
@@ -41,9 +42,11 @@ epochs = opts[:epochs]
 rand = opts[:randomize]
 reshard = opts[:reshard]
 predefined_shards = false
+per_shard_decoder_configs = false
 if opts[:shards] == 0
   predefined_shards = true
   num_shards = 0
+  per_shard_decoder_configs = true if opts[:per_shard_decoder_configs]
 else
   num_shards = opts[:shards]
 end
@@ -101,6 +104,9 @@ refs_files = []
 if predefined_shards
   input_files = File.new(input).readlines.map {|i| i.strip }
   refs_files = File.new(refs).readlines.map {|i| i.strip }
+  if per_shard_decoder_configs
+    decoder_configs = File.new(opts[:per_shard_decoder_configs]).readlines.map {|i| i.strip}
+  end
   num_shards = input_files.size
 else
   input_files, refs_files = make_shards input, refs, num_shards, 0, rand
@@ -126,8 +132,13 @@ end
   else
     local_end = "2>work/out.#{shard}.#{epoch}"
   end
+  if per_shard_decoder_configs
+    cdec_cfg = "--decoder_config #{decoder_configs[shard]}"
+  else
+    cdec_cfg = ""
+  end
   pids << Kernel.fork {
-    `#{qsub_str_start}#{dtrain_bin} -c #{ini}\
+    `#{qsub_str_start}#{dtrain_bin} -c #{ini} #{cdec_cfg}\
     --input #{input_files[shard]}\
     --refs #{refs_files[shard]} #{input_weights}\
     --output work/weights.#{shard}.#{epoch}#{qsub_str_end} #{local_end}`
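The parallelize.rb change above lets each predefined shard use its own cdec decoder config: the new --per_shard_decoder_configs option names a file with one decoder config path per line, line i belonging to shard i, and the matching path is passed to that shard's dtrain job as --decoder_config. A minimal sketch of that lookup, assuming hypothetical file names (decoder_configs.txt, dtrain.ini, input.N, refs.N) that are not part of the commit:

```ruby
# Sketch of the per-shard decoder config lookup added above; file names are
# hypothetical placeholders, not part of the commit.
configs_list = "decoder_configs.txt" # one cdec ini path per line; line i belongs to shard i

# same readlines-and-strip pattern the script uses for the input/refs lists
decoder_configs = File.new(configs_list).readlines.map { |l| l.strip }

num_shards = decoder_configs.size
(0...num_shards).each do |shard|
  # empty flag when no per-shard config is available, mirroring the else branch in the diff
  cdec_cfg = decoder_configs[shard] ? "--decoder_config #{decoder_configs[shard]}" : ""
  # the real script forks a dtrain job here; printing the command keeps the sketch self-contained
  puts "dtrain -c dtrain.ini #{cdec_cfg} --input input.#{shard} --refs refs.#{shard}"
end
```

As in the diff, the option is only honored when shards is 0 (predefined shards), so the config list is expected to line up one-to-one with the predefined input and reference shard lists.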