From ef2df950520a47ca7011736648334eedeae5297a Mon Sep 17 00:00:00 2001
From: Patrick Simianer <p@simianer.de>
Date: Wed, 19 Oct 2011 20:56:22 +0200
Subject: merged, compiles but not working

---
 dtrain/dtrain.cc | 75 +++++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 50 insertions(+), 25 deletions(-)

(limited to 'dtrain/dtrain.cc')

diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index 0a94f7aa..e96b65aa 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -20,8 +20,8 @@ dtrain_init(int argc, char** argv, po::variables_map* cfg)
     ("stop_after",     po::value<unsigned>()->default_value(0), "stop after X input sentences")
     ("print_weights",  po::value<string>(), "weights to print on each iteration")
     ("hstreaming",     po::value<bool>()->zero_tokens(), "run in hadoop streaming mode")
-    ("learning_rate",  po::value<double>()->default_value(0.0005), "learning rate")
-    ("gamma",          po::value<double>()->default_value(0), "gamma for SVM (0 for perceptron)")
+    ("learning_rate",  po::value<weight_t>()->default_value(0.0005), "learning rate")
+    ("gamma",          po::value<weight_t>()->default_value(0), "gamma for SVM (0 for perceptron)")
     ("tmp",            po::value<string>()->default_value("/tmp"), "temp dir to use")
     ("select_weights", po::value<string>()->default_value("last"), "output 'best' or 'last' weights ('VOID' to throw away)")
     ("noup",           po::value<bool>()->zero_tokens(), "do not update weights");
@@ -134,15 +134,14 @@ main(int argc, char** argv)
   observer->SetScorer(scorer);

   // init weights
-  Weights weights;
-  if (cfg.count("input_weights")) weights.InitFromFile(cfg["input_weights"].as<string>());
-  SparseVector<double> lambdas;
-  weights.InitSparseVector(&lambdas);
-  vector<double> dense_weights;
+  vector<weight_t>& dense_weights = decoder.CurrentWeightVector();
+  SparseVector<weight_t> lambdas;
+  if (cfg.count("input_weights")) Weights::InitFromFile(cfg["input_weights"].as<string>(), &dense_weights);
+  Weights::InitSparseVector(dense_weights, &lambdas);

   // meta params for perceptron, SVM
-  double eta = cfg["learning_rate"].as<double>();
-  double gamma = cfg["gamma"].as<double>();
+  weight_t eta = cfg["learning_rate"].as<weight_t>();
+  weight_t gamma = cfg["gamma"].as<weight_t>();
   WordID __bias = FD::Convert("__bias");
   lambdas.add_value(__bias, 0);
@@ -160,7 +159,7 @@ main(int argc, char** argv)
   grammar_buf_out.open(grammar_buf_fn.c_str());

   unsigned in_sz = 999999999; // input index, input size
-  vector<pair<score_t, score_t> > all_scores;
+  vector<pair<score_t,score_t> > all_scores;
   score_t max_score = 0.;
   unsigned best_it = 0;
   float overall_time = 0.;
@@ -189,6 +188,15 @@ main(int argc, char** argv)
   }

+  //LogVal<double> a(2.2);
+  //LogVal<double> b(2.1);
+  //cout << a << endl;
+  //cout << log(a) << endl;
+  //LogVal<double> c = a - b;
+  //cout << log(c) << endl;
+  //exit(0);
+
+
   for (unsigned t = 0; t < T; t++) // T epochs
   {
@@ -196,7 +204,8 @@ main(int argc, char** argv)
   time(&start);
   igzstream grammar_buf_in;
   if (t > 0) grammar_buf_in.open(grammar_buf_fn.c_str());
-  score_t score_sum = 0., model_sum = 0.;
+  score_t score_sum = 0.;
+  score_t model_sum(0);
   unsigned ii = 0, nup = 0, npairs = 0;
   if (!quiet) cerr << "Iteration #" << t+1 << " of " << T << "." << endl;
@@ -238,10 +247,7 @@ main(int argc, char** argv)
     if (next || stop) break;

     // weights
-    dense_weights.clear();
-    weights.InitFromVector(lambdas);
-    weights.InitVector(&dense_weights);
-    decoder.SetWeights(dense_weights);
+    lambdas.init_vector(&dense_weights);

     // getting input
     vector<string> in_split; // input: sid\tsrc\tref\tpsg
@@ -289,7 +295,8 @@ main(int argc, char** argv)

     // get (scored) samples
     vector<ScoredHyp>* samples = observer->GetSamples();
-    if (verbose) {
+    // FIXME
+    /*if (verbose) {
       cout << "[ref: '";
       if (t > 0) cout << ref_ids_buf[ii];
       else cout << ref_ids;
@@ -297,7 +304,15 @@ main(int argc, char** argv)
       cout << endl;
       cout << _p5 << _np << "1best: " << "'" << (*samples)[0].w << "'" << endl;
       cout << "SCORE=" << (*samples)[0].score << ",model="<< (*samples)[0].model << endl;
       cout << "F{" << (*samples)[0].f << "} ]" << endl << endl;
-    }
+    }*/
+    /*cout << lambdas.get(FD::Convert("PhraseModel_0")) << endl;
+    cout << (*samples)[0].model << endl;
+    cout << "1best: ";
+    for (unsigned u = 0; u < (*samples)[0].w.size(); u++) cout << TD::Convert((*samples)[0].w[u]) << " ";
+    cout << endl;
+    cout << (*samples)[0].f << endl;
+    cout << "___" << endl;*/
+
     score_sum += (*samples)[0].score;
     model_sum += (*samples)[0].model;
@@ -317,21 +332,21 @@ main(int argc, char** argv)
       if (!gamma) {
         // perceptron
         if (it->first.score - it->second.score < 0) { // rank error
-          SparseVector<double> dv = it->second.f - it->first.f;
+          SparseVector<weight_t> dv = it->second.f - it->first.f;
           dv.add_value(__bias, -1);
           lambdas.plus_eq_v_times_s(dv, eta);
           nup++;
         }
       } else {
         // SVM
-        double rank_error = it->second.score - it->first.score;
+        score_t rank_error = it->second.score - it->first.score;
         if (rank_error > 0) {
-          SparseVector<double> dv = it->second.f - it->first.f;
+          SparseVector<weight_t> dv = it->second.f - it->first.f;
           dv.add_value(__bias, -1);
           lambdas.plus_eq_v_times_s(dv, eta);
         }
         // regularization
-        double margin = it->first.model - it->second.model;
+        score_t margin = it->first.model - it->second.model;
         if (rank_error || margin < 1) {
           lambdas.plus_eq_v_times_s(lambdas, -2*gamma*eta); // reg /= #EXAMPLES or #UPDATES ?
           nup++;
@@ -339,6 +354,15 @@ main(int argc, char** argv)
       }
     }
   }
+
+
+  vector<weight_t> x;
+  lambdas.init_vector(&x);
+  for (int q = 0; q < x.size(); q++) {
+    if (x[q] < -10 && x[q] != 0)
+      cout << FD::Convert(q) << " " << x[q] << endl;
+  }
+  cout << " --- " << endl;

   ++ii;
@@ -358,7 +382,8 @@ main(int argc, char** argv)
   // print some stats
   score_t score_avg = score_sum/(score_t)in_sz;
   score_t model_avg = model_sum/(score_t)in_sz;
-  score_t score_diff, model_diff;
+  score_t score_diff;
+  score_t model_diff;
   if (t > 0) {
     score_diff = score_avg - all_scores[t-1].first;
     model_diff = model_avg - all_scores[t-1].second;
@@ -402,10 +427,10 @@ main(int argc, char** argv)

   // write weights to file
   if (select_weights == "best") {
-    weights.InitFromVector(lambdas);
     string infix = "dtrain-weights-" + boost::lexical_cast<string>(t);
+    lambdas.init_vector(&dense_weights);
     string w_fn = gettmpf(tmp_path, infix, "gz");
-    weights.WriteToFile(w_fn, true);
+    Weights::WriteToFile(w_fn, dense_weights, true);
     weights_files.push_back(w_fn);
   }
@@ -420,7 +445,7 @@ main(int argc, char** argv)
     ostream& o = *of.stream();
     o.precision(17);
     o << _np;
-    for (SparseVector<double>::const_iterator it = lambdas.begin(); it != lambdas.end(); ++it) {
+    for (SparseVector<weight_t>::const_iterator it = lambdas.begin(); it != lambdas.end(); ++it) {
       if (it->second == 0) continue;
       o << FD::Convert(it->first) << '\t' << it->second << endl;
     }
--
cgit v1.2.3
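This first patch is the merge with cdec's reworked weights API: the decoder now owns the dense weight vector (decoder.CurrentWeightVector()), static Weights:: helpers replace the old Weights object, and dtrain's types move from double to weight_t (cdec's weight type, a double typedef). The loop it touches implements a pairwise rank update over k-best pairs. As a reference, here is a minimal self-contained sketch of that update rule, with a plain std::map standing in for cdec's SparseVector<weight_t>, a hypothetical Hyp struct in place of ScoredHyp, 'first' assumed to be the model-preferred member of each pair, and the constant __bias feature (the dv.add_value(__bias, -1) lines) left out for brevity:

    #include <map>
    #include <string>

    // Hypothetical stand-in for dtrain's ScoredHyp: model score, metric
    // score (e.g. per-sentence BLEU), and a sparse feature vector.
    typedef std::map<std::string, double> FeatVec;
    struct Hyp { double model, score; FeatVec f; };

    void PlusEq(FeatVec* w, const FeatVec& v, double s) {  // w += s * v
      for (FeatVec::const_iterator it = v.begin(); it != v.end(); ++it)
        (*w)[it->first] += s * it->second;
    }

    void Scale(FeatVec* w, double s) {                     // w *= s
      for (FeatVec::iterator it = w->begin(); it != w->end(); ++it)
        it->second *= s;
    }

    // One pairwise update as in the loop above; gamma == 0 gives the
    // plain perceptron branch.
    void PairUpdate(const Hyp& first, const Hyp& second,
                    double eta, double gamma, FeatVec* lambdas) {
      double rank_error = second.score - first.score;
      if (rank_error > 0) {        // metric disagrees with the model ranking
        FeatVec dv = second.f;     // dv = f(second) - f(first)
        PlusEq(&dv, first.f, -1.0);
        PlusEq(lambdas, dv, eta);  // lambdas += eta * dv
      }
      if (gamma) {                 // SVM-style stochastic L2 regularization
        double margin = first.model - second.model;
        if (rank_error > 0 || margin < 1)
          Scale(lambdas, 1.0 - 2.0 * gamma * eta);  // lambdas -= 2*gamma*eta*lambdas
      }
    }

With gamma == 0 this reduces to the perceptron branch above; with gamma > 0 the Scale() call is the L2 step whose normalization the in-code comment (reg /= #EXAMPLES or #UPDATES ?) is still unsure about.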
From 18ba09140e937bde6c94247640484fe44f7cd665 Mon Sep 17 00:00:00 2001
From: Patrick Simianer <p@simianer.de>
Date: Wed, 19 Oct 2011 21:41:00 +0200
Subject: debug

---
 decoder/hg.h            |  2 --
 dtrain/dtrain.cc        | 21 +++++++--------------
 dtrain/kbestget.h       |  8 +++++---
 klm/lm/binary_format.hh |  2 --
 klm/lm/model_test.cc    |  8 --------
 utils/dict.h            |  5 ++---
 utils/weights.cc        |  1 +
 7 files changed, 15 insertions(+), 32 deletions(-)

(limited to 'dtrain/dtrain.cc')

diff --git a/decoder/hg.h b/decoder/hg.h
index 52a18601..f0ddbb76 100644
--- a/decoder/hg.h
+++ b/decoder/hg.h
@@ -397,8 +397,6 @@ public:
   template <class V>
   void Reweight(const V& weights) {
     for (int i = 0; i < edges_.size(); ++i) {
-      SparseVector<double> v;
-      //v.set_value(FD::Convert("use_shell"), 1000);
       Edge& e = edges_[i];
       e.edge_prob_.logeq(e.feature_values_.dot(weights));
     }

diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index e96b65aa..795c82fd 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -188,15 +188,6 @@ main(int argc, char** argv)
   }

-  //LogVal<double> a(2.2);
-  //LogVal<double> b(2.1);
-  //cout << a << endl;
-  //cout << log(a) << endl;
-  //LogVal<double> c = a - b;
-  //cout << log(c) << endl;
-  //exit(0);
-
-
   for (unsigned t = 0; t < T; t++) // T epochs
   {
@@ -298,7 +289,7 @@ main(int argc, char** argv)
     // FIXME
     /*if (verbose) {
       cout << "[ref: '";
-      if (t > 0) cout << ref_ids_buf[ii];
+      if (t > 0) cout << ref_ids_buf[ii]; <---
       else cout << ref_ids;
       cout << endl;
       cout << _p5 << _np << "1best: " << "'" << (*samples)[0].w << "'" << endl;
@@ -355,14 +346,16 @@ main(int argc, char** argv)
       }
     }

-
+  // DEBUG
   vector<weight_t> x;
   lambdas.init_vector(&x);
-  for (int q = 0; q < x.size(); q++) {
-    if (x[q] < -10 && x[q] != 0)
-      cout << FD::Convert(q) << " " << x[q] << endl;
+  cout << "[" << ii << "]" << endl;
+  for (int jj = 0; jj < x.size(); jj++) {
+    if (x[jj] != 0)
+      cout << FD::Convert(jj) << " " << x[jj] << endl;
   }
   cout << " --- " << endl;
+  // /DEBUG

   ++ii;

diff --git a/dtrain/kbestget.h b/dtrain/kbestget.h
index 4aadee7a..98f289eb 100644
--- a/dtrain/kbestget.h
+++ b/dtrain/kbestget.h
@@ -107,8 +107,10 @@ struct KBestGetter : public HypSampler
       ScoredHyp h;
       h.w = d->yield;
       h.f = d->feature_values;
-      h.model = d->score;
-      cout << i << ". "<< h.model << endl;
+      h.model = d->score.as_float();
+      // DEBUG
+      cout << i+1 << ". "<< h.model << endl;
+      // /DEBUG
       h.rank = i;
       h.score = scorer_->Score(h.w, *ref_, i);
       s_.push_back(h);
@@ -127,7 +129,7 @@ struct KBestGetter : public HypSampler
       ScoredHyp h;
       h.w = d->yield;
       h.f = d->feature_values;
-      h.model = -1*log(d->score);
+      h.model = d->score.as_float();
       h.rank = i;
       h.score = scorer_->Score(h.w, *ref_, i);
       s_.push_back(h);

diff --git a/klm/lm/binary_format.hh b/klm/lm/binary_format.hh
index a83f6b89..e9df0892 100644
--- a/klm/lm/binary_format.hh
+++ b/klm/lm/binary_format.hh
@@ -76,8 +76,6 @@ void MatchCheck(ModelType model_type, unsigned int search_version, const Paramet

 void SeekPastHeader(int fd, const Parameters &params);

-void SeekPastHeader(int fd, const Parameters &params);
-
 uint8_t *SetupBinary(const Config &config, const Parameters &params, std::size_t memory_size, Backing &backing);

 void ComplainAboutARPA(const Config &config, ModelType model_type);

diff --git a/klm/lm/model_test.cc b/klm/lm/model_test.cc
index 3585d34b..2654071f 100644
--- a/klm/lm/model_test.cc
+++ b/klm/lm/model_test.cc
@@ -264,14 +264,6 @@ template <class M> void NoUnkCheck(const M &model) {
   BOOST_CHECK_CLOSE(-100.0, ret.prob, 0.001);
 }

-template <class M> void NoUnkCheck(const M &model) {
-  WordIndex unk_index = 0;
-  State state;
-
-  FullScoreReturn ret = model.FullScoreForgotState(&unk_index, &unk_index + 1, unk_index, state);
-  BOOST_CHECK_CLOSE(-100.0, ret.prob, 0.001);
-}
-
 template <class M> void Everything(const M &m) {
   Starters(m);
   Continuation(m);

diff --git a/utils/dict.h b/utils/dict.h
index 33cca6cf..a3400868 100644
--- a/utils/dict.h
+++ b/utils/dict.h
@@ -1,7 +1,6 @@
 #ifndef DICT_H_
 #define DICT_H_

-#include <iostream>
 #include <cassert>
 #include <cstring>
@@ -73,8 +72,8 @@ class Dict {

   inline const std::string& Convert(const WordID& id) const {
     if (id == 0) return b0_;
-    //assert(id <= (int)words_.size());
-    if (id < 0 || id > (int)words_.size()) return b0_;
+    assert(id <= (int)words_.size());
+    //if (id < 0 || id > (int)words_.size()) return b0_;
     return words_[id-1];
   }

diff --git a/utils/weights.cc b/utils/weights.cc
index f1406cbf..ac407dfb 100644
--- a/utils/weights.cc
+++ b/utils/weights.cc
@@ -154,3 +154,4 @@ void Weights::ShowLargestFeatures(const vector<weight_t>& w) {
   cerr << endl;
 }
+
--
cgit v1.2.3
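Most of this commit is debug scaffolding plus cleanup of merge damage (the duplicated SeekPastHeader declaration and NoUnkCheck test, the leftover SparseVector in hg.h::Reweight, the re-enabled assert in Dict::Convert). The substantive change is in kbestget.h: a derivation's score in cdec is a LogVal<double>, i.e. a number kept in log space, and the two k-best code paths previously disagreed about how to turn it into h.model (raw value vs. -1*log). Both now use as_float(), which converts back to real space and can therefore underflow to zero for long derivations. A short sketch of the distinction, using an illustrative LogDouble type in place of cdec's LogVal<double> (the internals here are assumptions for illustration, not cdec's exact implementation):

    #include <cmath>
    #include <iostream>

    // Illustrative stand-in for cdec's LogVal<double>: the value x is kept
    // as ln(x), so products of many small probabilities stay representable.
    struct LogDouble {
      double lx; // ln(x)
      explicit LogDouble(double x) : lx(std::log(x)) {}
      static LogDouble FromLog(double l) { LogDouble v(1.0); v.lx = l; return v; }
      double as_float() const { return std::exp(lx); }       // back to real space
      friend double log(const LogDouble& v) { return v.lx; } // stay in log space
    };

    int main() {
      // Model score of a long derivation: a product of many small probabilities.
      LogDouble score = LogDouble::FromLog(-2000.0);
      std::cout << score.as_float() << std::endl; // 0 -- underflows as a double
      std::cout << log(score) << std::endl;       // -2000 -- still usable
      return 0;
    }

That underflow is presumably why the follow-up commit switches both paths to log(d->score): a log-space model score stays finite and comparable, which is all the margin computation in dtrain.cc needs.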
From 78baccbb4231bb84a456702d4f574f8e601a8182 Mon Sep 17 00:00:00 2001
From: Patrick Simianer <p@simianer.de>
Date: Thu, 20 Oct 2011 02:20:43 +0200
Subject: really merged fix

---
 dtrain/README.md               | 2 +-
 dtrain/dtrain.cc               | 8 ++++----
 dtrain/kbestget.h              | 7 ++-----
 dtrain/test/example/dtrain.ini | 8 ++++----
 4 files changed, 11 insertions(+), 14 deletions(-)

(limited to 'dtrain/dtrain.cc')

diff --git a/dtrain/README.md b/dtrain/README.md
index 1ee3823e..b453c649 100644
--- a/dtrain/README.md
+++ b/dtrain/README.md
@@ -43,7 +43,7 @@ Uncertain, known bugs, problems

 FIXME
 -----
-* merge with cdec master
+none

 Data
 ----

diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index 795c82fd..05c3728d 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -349,12 +349,12 @@ main(int argc, char** argv)
   // DEBUG
   vector<weight_t> x;
   lambdas.init_vector(&x);
-  cout << "[" << ii << "]" << endl;
+  //cout << "[" << ii << "]" << endl;
   for (int jj = 0; jj < x.size(); jj++) {
-    if (x[jj] != 0)
-      cout << FD::Convert(jj) << " " << x[jj] << endl;
+    //if (x[jj] != 0)
+    //cout << FD::Convert(jj) << " " << x[jj] << endl;
   }
-  cout << " --- " << endl;
+  //cout << " --- " << endl;
   // /DEBUG

   ++ii;

diff --git a/dtrain/kbestget.h b/dtrain/kbestget.h
index 98f289eb..abe657d0 100644
--- a/dtrain/kbestget.h
+++ b/dtrain/kbestget.h
@@ -107,10 +107,7 @@ struct KBestGetter : public HypSampler
       ScoredHyp h;
       h.w = d->yield;
       h.f = d->feature_values;
-      h.model = d->score.as_float();
-      // DEBUG
-      cout << i+1 << ". "<< h.model << endl;
-      // /DEBUG
+      h.model = log(d->score);
       h.rank = i;
       h.score = scorer_->Score(h.w, *ref_, i);
       s_.push_back(h);
@@ -129,7 +126,7 @@ struct KBestGetter : public HypSampler
       ScoredHyp h;
       h.w = d->yield;
       h.f = d->feature_values;
-      h.model = d->score.as_float();
+      h.model = log(d->score);
       h.rank = i;
       h.score = scorer_->Score(h.w, *ref_, i);
       s_.push_back(h);

diff --git a/dtrain/test/example/dtrain.ini b/dtrain/test/example/dtrain.ini
index 96bdbf8e..185d6d90 100644
--- a/dtrain/test/example/dtrain.ini
+++ b/dtrain/test/example/dtrain.ini
@@ -1,14 +1,14 @@
 decoder_config=test/example/cdec.ini
 k=100
 N=3
-gamma=0 #.00001
-epochs=2
+gamma=0
+epochs=5
 input=test/example/nc-1k-tabs.gz
 scorer=stupid_bleu
 output=-
-stop_after=5
+stop_after=100
 sample_from=kbest
-pair_sampling=all #108010
+pair_sampling=all
 select_weights=VOID
 print_weights=Glue WordPenalty LanguageModel LanguageModel_OOV PhraseModel_0 PhraseModel_1 PhraseModel_2 PhraseModel_3 PhraseModel_4 PassThrough
 tmp=/tmp
--
cgit v1.2.3
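The final patch settles h.model = log(d->score) for both k-best paths, marks the cdec merge as done in the README, and cleans up the example configuration (dropping the stale inline comments; epochs=5 with stop_after=100 makes the example run a bit longer). dtrain.ini is a plain key=value file read through boost::program_options, the same library dtrain_init() declares its options with in the first patch. A minimal sketch of that mechanism; only a few of dtrain's options are declared here, and the defaults are illustrative rather than dtrain's actual ones:

    #include <fstream>
    #include <iostream>
    #include <string>

    #include <boost/program_options.hpp>

    namespace po = boost::program_options;

    int main() {
      po::options_description conf("dtrain options (subset; defaults illustrative)");
      conf.add_options()
        ("decoder_config", po::value<std::string>(), "config file for cdec")
        ("k",          po::value<unsigned>()->default_value(100), "k-best list size")
        ("epochs",     po::value<unsigned>()->default_value(2), "# of iterations T")
        ("gamma",      po::value<double>()->default_value(0), "gamma for SVM (0 for perceptron)")
        ("stop_after", po::value<unsigned>()->default_value(0), "stop after X input sentences");

      std::ifstream ini("test/example/dtrain.ini");
      po::variables_map cfg;
      // 'true' = allow_unregistered: skip keys not declared above
      // (scorer, output, ...), instead of throwing on them.
      po::store(po::parse_config_file(ini, conf, true), cfg);
      po::notify(cfg);

      std::cout << "epochs=" << cfg["epochs"].as<unsigned>()
                << " stop_after=" << cfg["stop_after"].as<unsigned>() << std::endl;
      return 0;
    }

dtrain itself declares the full option set, so it can parse the same file strictly; the allow_unregistered flag above is only needed because this sketch declares a subset.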