author     Patrick Simianer <p@simianer.de>   2014-01-28 15:35:31 +0100
committer  Patrick Simianer <p@simianer.de>   2014-01-28 15:35:31 +0100
commit     c83f665cb7efbbfb0fdfa12203b09ba60e365d25
tree       d9132aaf35e696a52c5e09430ae2889b033cdacb /training
parent     85088dc6e09d4e91038aea46e8d20b5c34053b5f
parent     3e22fcc3569a2855f691be4e3ee81f644b926c04
resolv conflict in mira
Diffstat (limited to 'training')
-rw-r--r-- | training/mira/kbest_cut_mira.cc | 10
-rwxr-xr-x | training/mira/mira.py | 2
2 files changed, 7 insertions, 5 deletions
diff --git a/training/mira/kbest_cut_mira.cc b/training/mira/kbest_cut_mira.cc
index 9415909e..9de57f5f 100644
--- a/training/mira/kbest_cut_mira.cc
+++ b/training/mira/kbest_cut_mira.cc
@@ -134,6 +134,7 @@ static const int MAX_SMO = 10;
 int cur_pass;
 
 struct HypothesisInfo {
+  HypothesisInfo() : mt_metric(), hope(), fear(), alpha(), oracle_loss() {}
   SparseVector<double> features;
   vector<WordID> hyp;
   double mt_metric;
@@ -415,8 +416,9 @@ struct TrainingObserver : public DecoderObserver {
 
   template <class Filter>
   void UpdateOracles(int sent_id, const Hypergraph& forest) {
-    if (stream) sent_id = 0;
+    if (stream) sent_id = 0;
     bool PRINT_LIST= false;
+    assert(sent_id < oracles.size());
     vector<boost::shared_ptr<HypothesisInfo> >& cur_good = oracles[sent_id].good;
     vector<boost::shared_ptr<HypothesisInfo> >& cur_bad = oracles[sent_id].bad;
     //TODO: look at keeping previous iterations hypothesis lists around
@@ -813,7 +815,6 @@ int main(int argc, char** argv) {
       }
     else if(optimizer == 1) //sgd - nonadapted step size
       {
-
        lambdas += (cur_good.features) * max_step_size;
        lambdas -= (cur_bad.features) * max_step_size;
       }
@@ -932,10 +933,11 @@ int main(int argc, char** argv) {
       lambdas += (cur_pair[1]->features) * step_size;
       lambdas -= (cur_pair[0]->features) * step_size;
       if (VERBOSE) cerr << " Lambdas " << lambdas << endl;
 
-      //reload weights based on update
+      //reload weights based on update
       dense_weights.clear();
       lambdas.init_vector(&dense_weights);
+      ShowLargestFeatures(dense_weights);
 
       dense_w_local = dense_weights;
       iter++;
@@ -974,7 +976,7 @@ int main(int argc, char** argv) {
 
       for(int u=0;u!=cur_constraint.size();u++)
        {
-         cerr << cur_constraint[u]->alpha << " " << cur_constraint[u]->hope << " " << cur_constraint[u]->fear << endl;
+         cerr << "alpha=" << cur_constraint[u]->alpha << " hope=" << cur_constraint[u]->hope << " fear=" << cur_constraint[u]->fear << endl;
          temp_objective += cur_constraint[u]->alpha * cur_constraint[u]->fear;
        }
       objective += temp_objective;
diff --git a/training/mira/mira.py b/training/mira/mira.py
index 1555cbb4..1861da1a 100755
--- a/training/mira/mira.py
+++ b/training/mira/mira.py
@@ -447,7 +447,7 @@ def optimize(args, script_dir, dev_size):
     new_weights_file = '{}/weights.{}'.format(args.output_dir, i+1)
     last_weights_file = '{}/weights.{}'.format(args.output_dir, i)
     i += 1
-    weight_files = args.output_dir+'/weights.pass*/weights.mira-pass*[0-9].gz'
+    weight_files = weightdir+'/weights.mira-pass*.*[0-9].gz'
     average_weights(new_weights_file, weight_files)
 
     logging.info('BEST ITERATION: {} (SCORE={})'.format(
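The most substantive change above is the new default constructor for `HypothesisInfo`: previously its `double` members (`mt_metric`, `hope`, `fear`, `alpha`, `oracle_loss`) were left uninitialized on default construction, so any score read before assignment was indeterminate. Below is a minimal sketch of the value-initialization idiom the constructor uses; the struct name `Hyp` and the standalone `main` are hypothetical stand-ins for illustration, not code from cdec.

```cpp
// Sketch only: shows how an empty member initializer such as mt_metric()
// value-initializes the member, so every double starts at 0.0 instead of
// holding garbage.
#include <cassert>

struct Hyp {
  Hyp() : mt_metric(), hope(), fear(), alpha(), oracle_loss() {}
  double mt_metric;
  double hope;
  double fear;
  double alpha;
  double oracle_loss;
};

int main() {
  Hyp h;  // without the constructor, these fields would be indeterminate here
  assert(h.mt_metric == 0.0 && h.alpha == 0.0 && h.oracle_loss == 0.0);
  return 0;
}
```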