From dba017629ff32bbd0af8770ddee88082bb626bee Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Tue, 28 Jan 2014 00:18:31 -0500
Subject: fix initialization of lagrange multipliers

---
 training/mira/kbest_cut_mira.cc | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

(limited to 'training')

diff --git a/training/mira/kbest_cut_mira.cc b/training/mira/kbest_cut_mira.cc
index 990609d7..e0b6eecb 100644
--- a/training/mira/kbest_cut_mira.cc
+++ b/training/mira/kbest_cut_mira.cc
@@ -133,6 +133,7 @@ static const int MAX_SMO = 10;
 int cur_pass;
 
 struct HypothesisInfo {
+  HypothesisInfo() : mt_metric(), hope(), fear(), alpha(), oracle_loss() {}
   SparseVector<double> features;
   vector<WordID> hyp;
   double mt_metric;
@@ -414,8 +415,9 @@ struct TrainingObserver : public DecoderObserver {
 
   template
   void UpdateOracles(int sent_id, const Hypergraph& forest) {
-    if (stream) sent_id = 0;
+    if (stream) sent_id = 0;
     bool PRINT_LIST= false;
+    assert(sent_id < oracles.size());
    vector<shared_ptr<HypothesisInfo> >& cur_good = oracles[sent_id].good;
    vector<shared_ptr<HypothesisInfo> >& cur_bad = oracles[sent_id].bad;
    //TODO: look at keeping previous iterations hypothesis lists around
@@ -810,7 +812,6 @@ int main(int argc, char** argv) {
	}
      else if(optimizer == 1) //sgd - nonadapted step size
	{
-
	  lambdas += (cur_good.features) * max_step_size;
	  lambdas -= (cur_bad.features) * max_step_size;
	}
@@ -928,11 +929,11 @@ int main(int argc, char** argv) {
 
		  lambdas += (cur_pair[1]->features) * step_size;
		  lambdas -= (cur_pair[0]->features) * step_size;
 
-		  cerr << " Lambdas " << lambdas << endl;
-	  //reload weights based on update
+		  //reload weights based on update
		  dense_weights.clear();
		  lambdas.init_vector(&dense_weights);
+		  ShowLargestFeatures(dense_weights);
		  dense_w_local = dense_weights;
 
		  iter++;
@@ -971,7 +972,7 @@ int main(int argc, char** argv) {
 
	    for(int u=0;u!=cur_constraint.size();u++)
	      {
-		cerr << cur_constraint[u]->alpha << " " << cur_constraint[u]->hope << " " << cur_constraint[u]->fear << endl;
+		cerr << "alpha=" << cur_constraint[u]->alpha << " hope=" << cur_constraint[u]->hope << " fear=" << cur_constraint[u]->fear << endl;
		temp_objective += cur_constraint[u]->alpha * cur_constraint[u]->fear;
	      }
	    objective += temp_objective;
--
cgit v1.2.3


From 3e22fcc3569a2855f691be4e3ee81f644b926c04 Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Tue, 28 Jan 2014 02:36:03 -0500
Subject: what did i do

---
 training/mira/mira.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'training')

diff --git a/training/mira/mira.py b/training/mira/mira.py
index d5a1d9f8..c84a8cff 100755
--- a/training/mira/mira.py
+++ b/training/mira/mira.py
@@ -443,7 +443,7 @@ def optimize(args, script_dir, dev_size):
     new_weights_file = '{}/weights.{}'.format(args.output_dir, i+1)
     last_weights_file = '{}/weights.{}'.format(args.output_dir, i)
     i += 1
-    weight_files = args.output_dir+'/weights.pass*/weights.mira-pass*[0-9].gz'
+    weight_files = weightdir+'/weights.mira-pass*.*[0-9].gz'
     average_weights(new_weights_file, weight_files)
 
     logging.info('BEST ITERATION: {} (SCORE={})'.format(
--
cgit v1.2.3
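
Editor's note on the first patch, as a sketch rather than a statement about the cdec sources: HypothesisInfo keeps its MIRA quantities in plain double members, and the alpha field is what the subject line's "lagrange multipliers" refers to (it is later summed as alpha * fear and driven by the SMO-style loop). Before this patch, a freshly allocated HypothesisInfo left those doubles indeterminate; the added HypothesisInfo() constructor value-initializes them to 0.0. The stand-alone program below uses a hypothetical HypothesisInfoSketch stand-in (assumed name, not the real cdec class) to show the same technique: empty parentheses in a member-initializer list value-initialize each double.

// Stand-alone sketch (assumed names; not the cdec sources): the empty
// parentheses in the member-initializer list value-initialize each double
// to 0.0, mirroring the HypothesisInfo() constructor added in the patch.
#include <iostream>
#include <vector>

struct HypothesisInfoSketch {   // hypothetical stand-in for cdec's HypothesisInfo
  HypothesisInfoSketch() : mt_metric(), hope(), fear(), alpha(), oracle_loss() {}
  std::vector<int> hyp;         // container members default-construct either way
  double mt_metric;
  double hope;
  double fear;
  double alpha;                 // plays the role of the Lagrange multiplier here
  double oracle_loss;
};

int main() {
  HypothesisInfoSketch* h = new HypothesisInfoSketch;
  std::cout << "initial alpha = " << h->alpha << std::endl;  // prints 0 thanks to the constructor
  delete h;
  return 0;
}

Without such a constructor, new HypothesisInfoSketch would leave the doubles uninitialized, so an optimizer reading alpha before its first update would start from garbage rather than from a valid dual point of zero.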