From 33a891dbaf7c77e822f4692c9ac4056f5b29e88c Mon Sep 17 00:00:00 2001
From: Guest_account Guest_account prguest11
Date: Thu, 15 Sep 2011 12:52:59 +0100
Subject: script to filter reachable sentences, weight cleanup

---
 decoder/hg.h | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

(limited to 'decoder/hg.h')

diff --git a/decoder/hg.h b/decoder/hg.h
index e5ef05f8..f0ddbb76 100644
--- a/decoder/hg.h
+++ b/decoder/hg.h
@@ -49,16 +49,14 @@ public:
   // TODO get rid of cat_?
   // TODO keep cat_ and add span and/or state? :)
   struct Node {
-    Node() : id_(), cat_(), promise(1) {}
+    Node() : id_(), cat_() {}
     int id_; // equal to this object's position in the nodes_ vector
     WordID cat_; // non-terminal category if <0, 0 if not set
     WordID NT() const { return -cat_; }
     EdgesVector in_edges_; // an in edge is an edge with this node as its head. (in edges come from the bottom up to us) indices in edges_
     EdgesVector out_edges_; // an out edge is an edge with this node as its tail. (out edges leave us up toward the top/goal). indices in edges_
-    double promise; // set in global pruning; in [0,infty) so that mean is 1. use: e.g. scale cube poplimit. //TODO: appears to be useless, compile without this? on the other hand, pretty cheap.
     void copy_fixed(Node const& o) { // nonstructural fields only - structural ones are managed by sorting/pruning/subsetting
       cat_=o.cat_;
-      promise=o.promise;
     }
     void copy_reindex(Node const& o,indices_after const& n2,indices_after const& e2) {
       copy_fixed(o);
@@ -81,7 +79,7 @@ public:
     int head_node_; // refers to a position in nodes_
     TailNodeVector tail_nodes_; // contents refer to positions in nodes_
     TRulePtr rule_;
-    FeatureVector feature_values_;
+    SparseVector<weight_t> feature_values_;
     prob_t edge_prob_; // dot product of weights and feat_values
     int id_; // equal to this object's position in the edges_ vector

@@ -468,7 +466,7 @@ public:
   /// drop edge i if edge_margin[i] < prune_below, unless preserve_mask[i]
   void MarginPrune(EdgeProbs const& edge_margin,prob_t prune_below,EdgeMask const* preserve_mask=0,bool safe_inside=false,bool verbose=false);

-  //TODO: in my opinion, looking at the ratio of logprobs (features \dot weights) rather than the absolute difference generalizes more nicely across sentence lengths and weight vectors that are constant multiples of one another. at least make that an option. i worked around this a little in cdec by making "beam alpha per source word" but that's not helping with different tuning runs. this would also make me more comfortable about allocating Node.promise
+  //TODO: in my opinion, looking at the ratio of logprobs (features \dot weights) rather than the absolute difference generalizes more nicely across sentence lengths and weight vectors that are constant multiples of one another. at least make that an option. i worked around this a little in cdec by making "beam alpha per source word" but that's not helping with different tuning runs.
   // beam_alpha=0 means don't beam prune, otherwise drop things that are e^beam_alpha times worse than best - // prunes any edge whose prob_t on the best path taking that edge is more than e^alpha times
   //density=0 means don't density prune:
   // for density>=1.0, keep this many times the edges needed for the 1best derivation
-- 
cgit v1.2.3
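The pruning TODO at the end of the patch above argues for thresholding on a ratio of logprobs rather than an absolute difference. A minimal standalone C++ sketch of that idea (hypothetical code; the name and signature are illustrative, not cdec's actual MarginPrune API):

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Keep edge i iff its log score survives the cutoff; returns a keep-mask.
    // Assumes log_score is non-empty.
    std::vector<bool> BeamPrune(const std::vector<double>& log_score,
                                double alpha, bool use_ratio) {
      double best = *std::max_element(log_score.begin(), log_score.end());
      // Ratio cutoff scales with |best| (degenerate when best == 0);
      // absolute cutoff is a fixed margin below best.
      double cutoff = use_ratio ? best - alpha * std::fabs(best)
                                : best - alpha;
      std::vector<bool> keep(log_score.size());
      for (std::size_t i = 0; i < log_score.size(); ++i)
        keep[i] = log_score[i] >= cutoff;
      return keep;
    }

Scaling every weight by a constant c > 0 scales both best and the ratio cutoff by c, so the keep-mask is unchanged; the absolute margin is not invariant under such rescaling, which is the TODO's complaint.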
From 1f54c7b765d4f933f4b631f265933f28e4ae2c93 Mon Sep 17 00:00:00 2001
From: Patrick Simianer
Date: Wed, 19 Oct 2011 21:41:00 +0200
Subject: debug

---
 decoder/hg.h            |  2 --
 dtrain/dtrain.cc        | 21 +++++++--------------
 dtrain/kbestget.h       |  8 +++++---
 klm/lm/binary_format.hh |  2 --
 klm/lm/model_test.cc    |  8 --------
 utils/dict.h            |  5 ++---
 utils/weights.cc        |  1 +
 7 files changed, 15 insertions(+), 32 deletions(-)

(limited to 'decoder/hg.h')

diff --git a/decoder/hg.h b/decoder/hg.h
index 52a18601..f0ddbb76 100644
--- a/decoder/hg.h
+++ b/decoder/hg.h
@@ -397,8 +397,6 @@ public:
   template <typename V> void Reweight(const V& weights) {
     for (int i = 0; i < edges_.size(); ++i) {
-      SparseVector<double> v;
-      //v.set_value(FD::Convert("use_shell"), 1000);
       Edge& e = edges_[i];
       e.edge_prob_.logeq(e.feature_values_.dot(weights));
     }
   }
diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index e96b65aa..795c82fd 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -188,15 +188,6 @@ main(int argc, char** argv)

   }

-  //LogVal<double> a(2.2);
-  //LogVal<double> b(2.1);
-  //cout << a << endl;
-  //cout << log(a) << endl;
-  //LogVal<double> c = a - b;
-  //cout << log(c) << endl;
-  //exit(0);
-
-
  for (unsigned t = 0; t < T; t++) // T epochs
  {

@@ -298,7 +289,7 @@ main(int argc, char** argv)
       // FIXME
       /*if (verbose) {
         cout << "[ref: '";
-        if (t > 0) cout << ref_ids_buf[ii];
+        if (t > 0) cout << ref_ids_buf[ii]; <---
         else cout << ref_ids;
         cout << endl;
         cout << _p5 << _np << "1best: " << "'" << (*samples)[0].w << "'" << endl;
@@ -355,14 +346,16 @@ main(int argc, char** argv)

         }
       }
-
+      // DEBUG
       vector<weight_t> x;
       lambdas.init_vector(&x);
-      for (int q = 0; q < x.size(); q++) {
-        if (x[q] < -10 && x[q] != 0)
-          cout << FD::Convert(q) << " " << x[q] << endl;
+      cout << "[" << ii << "]" << endl;
+      for (int jj = 0; jj < x.size(); jj++) {
+        if (x[jj] != 0)
+          cout << FD::Convert(jj) << " " << x[jj] << endl;
       }
       cout << " --- " << endl;
+      // /DEBUG

       ++ii;
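The dtrain.cc hunk above dumps the sparse lambdas into a dense vector and prints every nonzero feature by name. A self-contained sketch of that pattern with simplified stand-in types (dtrain's real SparseVector<weight_t>, lambdas.init_vector, and FD dictionary are assumed, not reproduced):

    #include <iostream>
    #include <cstddef>
    #include <map>
    #include <string>
    #include <vector>

    int main() {
      std::vector<std::string> fd = {"LanguageModel", "PhraseModel_0"}; // stand-in for FD
      std::map<int, double> lambdas = {{0, 0.5}, {1, -1.25}};           // sparse weights
      std::vector<double> x(fd.size(), 0.0);                            // dense copy
      for (const auto& kv : lambdas) x[kv.first] = kv.second;           // like init_vector(&x)
      for (std::size_t jj = 0; jj < x.size(); ++jj)                     // print nonzeros
        if (x[jj] != 0) std::cout << fd[jj] << " " << x[jj] << std::endl;
      return 0;
    }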
"<< h.model << endl; + // /DEBUG h.rank = i; h.score = scorer_->Score(h.w, *ref_, i); s_.push_back(h); @@ -127,7 +129,7 @@ struct KBestGetter : public HypSampler ScoredHyp h; h.w = d->yield; h.f = d->feature_values; - h.model = -1*log(d->score); + h.model = d->score.as_float(); h.rank = i; h.score = scorer_->Score(h.w, *ref_, i); s_.push_back(h); diff --git a/klm/lm/binary_format.hh b/klm/lm/binary_format.hh index a83f6b89..e9df0892 100644 --- a/klm/lm/binary_format.hh +++ b/klm/lm/binary_format.hh @@ -76,8 +76,6 @@ void MatchCheck(ModelType model_type, unsigned int search_version, const Paramet void SeekPastHeader(int fd, const Parameters ¶ms); -void SeekPastHeader(int fd, const Parameters ¶ms); - uint8_t *SetupBinary(const Config &config, const Parameters ¶ms, std::size_t memory_size, Backing &backing); void ComplainAboutARPA(const Config &config, ModelType model_type); diff --git a/klm/lm/model_test.cc b/klm/lm/model_test.cc index 3585d34b..2654071f 100644 --- a/klm/lm/model_test.cc +++ b/klm/lm/model_test.cc @@ -264,14 +264,6 @@ template void NoUnkCheck(const M &model) { BOOST_CHECK_CLOSE(-100.0, ret.prob, 0.001); } -template void NoUnkCheck(const M &model) { - WordIndex unk_index = 0; - State state; - - FullScoreReturn ret = model.FullScoreForgotState(&unk_index, &unk_index + 1, unk_index, state); - BOOST_CHECK_CLOSE(-100.0, ret.prob, 0.001); -} - template void Everything(const M &m) { Starters(m); Continuation(m); diff --git a/utils/dict.h b/utils/dict.h index 33cca6cf..a3400868 100644 --- a/utils/dict.h +++ b/utils/dict.h @@ -1,7 +1,6 @@ #ifndef DICT_H_ #define DICT_H_ -#include #include #include @@ -73,8 +72,8 @@ class Dict { inline const std::string& Convert(const WordID& id) const { if (id == 0) return b0_; - //assert(id <= (int)words_.size()); - if (id < 0 || id > (int)words_.size()) return b0_; + assert(id <= (int)words_.size()); + //if (id < 0 || id > (int)words_.size()) return b0_; return words_[id-1]; } diff --git a/utils/weights.cc b/utils/weights.cc index f1406cbf..ac407dfb 100644 --- a/utils/weights.cc +++ b/utils/weights.cc @@ -154,3 +154,4 @@ void Weights::ShowLargestFeatures(const vector& w) { cerr << endl; } + -- cgit v1.2.3