From fa45c8489b2f016766a7a946eb7ba2b9140749ed Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Thu, 24 Mar 2011 16:37:06 -0400
Subject: more augment grammar options

---
 training/augment_grammar.cc | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/training/augment_grammar.cc b/training/augment_grammar.cc
index 9ad03b6c..df8d4ee8 100644
--- a/training/augment_grammar.cc
+++ b/training/augment_grammar.cc
@@ -35,6 +35,7 @@ bool InitCommandLine(int argc, char** argv, po::variables_map* conf) {
   opts.add_options()
         ("source_lm,l",po::value<string>(),"Source language LM (KLM)")
         ("collapse_weights,w",po::value<string>(), "Collapse weights into a single feature X using the coefficients from this weights file")
+        ("clear_features_after_collapse,c", "After collapse_weights, clear the features except for X")
         ("add_shape_types,s", "Add rule shape types")
         ("extra_lex_feature,x", "Experimental nonlinear lexical weighting feature")
         ("replace_files,r", "Replace files with transformed variants (requires loading full grammar into memory)")
@@ -90,6 +91,7 @@ bool extra_feature;
 int kSrcLM;
 vector<double> col_weights;
 bool gather_rules;
+bool clear_features = false;
 vector<TRulePtr> rules;
 
 static void RuleHelper(const TRulePtr& new_rule, const unsigned int ctf_level, const TRulePtr& coarse_rule, void* extra) {
@@ -107,7 +109,7 @@ static void RuleHelper(const TRulePtr& new_rule, const unsigned int ctf_level, c
   }
   if (col_weights.size()) {
     double score = r->scores_.dot(col_weights);
-    r->scores_.clear();
+    if (clear_features) r->scores_.clear();
     r->scores_.set_value(kX, score);
   }
   if (gather_rules) {
@@ -136,6 +138,7 @@ int main(int argc, char** argv) {
     w.InitFromFile(conf["collapse_weights"].as<string>());
     w.InitVector(&col_weights);
   }
+  clear_features = conf.count("clear_features_after_collapse") > 0;
  gather_rules = false;
   bool replace_files = conf.count("replace_files");
   if (replace_files) gather_rules = true;
-- 
cgit v1.2.3


From afb41a09cc10db8b47047630c8db3148dfa5f648 Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Thu, 24 Mar 2011 18:04:06 -0400
Subject: various summary feature types, part 1

---
 decoder/decoder.cc | 108 ++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 73 insertions(+), 35 deletions(-)

diff --git a/decoder/decoder.cc b/decoder/decoder.cc
index b7774acc..fdaf8cb1 100644
--- a/decoder/decoder.cc
+++ b/decoder/decoder.cc
@@ -66,6 +66,13 @@ void DecoderObserver::NotifyAlignmentFailure(const SentenceMetadata&) {}
 void DecoderObserver::NotifyAlignmentForest(const SentenceMetadata&, Hypergraph*) {}
 void DecoderObserver::NotifyDecodingComplete(const SentenceMetadata&) {}
 
+enum SummaryFeature {
+  kNODE_RISK = 1,
+  kEDGE_RISK,
+  kEDGE_PROB
+};
+
+
 struct ELengthWeightFunction {
   double operator()(const Hypergraph::Edge& e) const {
     return e.rule_->ELength() - e.rule_->Arity();
@@ -364,6 +371,7 @@ DecoderImpl::DecoderImpl(po::variables_map& conf, int argc, char** argv, istream
         ("feature_function,F",po::value<vector<string> >()->composing(), "Pass 1 additional feature function(s) (-L for list)")
         ("intersection_strategy,I",po::value<string>()->default_value("cube_pruning"), "Pass 1 intersection strategy for incorporating finite-state features; values include Cube_pruning, Full")
         ("summary_feature", po::value<string>(), "Compute a 'summary feature' at the end of the pass (before any pruning) with name=arg and value=inside-outside/Z")
+        ("summary_feature_type", po::value<string>()->default_value("node_risk"), "Summary feature types: node_risk, edge_risk, edge_prob")
         ("density_prune", po::value<double>(), "Pass 1 pruning: keep no more than this many times the number of edges used in the best derivation tree (>=1.0)")
         ("beam_prune", po::value<double>(), "Pass 1 pruning: Prune paths from scored forest, keep paths within exp(alpha>=0)")
 
@@ -386,8 +394,8 @@ DecoderImpl::DecoderImpl(po::variables_map& conf, int argc, char** argv, istream
     ("apply_fsa_by",po::value<string>()->default_value("BU_CUBE"), "Method for applying fsa_feature_functions - BU_FULL BU_CUBE EARLEY") //+ApplyFsaBy::all_names()
 #endif
     ("add_pass_through_rules,P","Add rules to translate OOV words as themselves")
-    ("k_best,k",po::value<int>(),"Extract the k best derivations")
-    ("unique_k_best,r", "Unique k-best translation list")
+        ("k_best,k",po::value<int>(),"Extract the k best derivations")
+        ("unique_k_best,r", "Unique k-best translation list")
     ("cubepruning_pop_limit,K",po::value<int>()->default_value(200), "Max number of pops from the candidate heap at each node")
     ("aligner,a", "Run as a word/phrase aligner (src & ref required)")
     ("aligner_use_viterbi", "If run in alignment mode, compute the Viterbi (rather than MAP) alignment")
@@ -775,6 +783,18 @@ bool DecoderImpl::Decode(const string& input, DecoderObserver* o) {
      cerr << "  Expected length (words): " << res.r / res.p << "\t" << res << endl;
    }
 
+    SummaryFeature summary_feature_type = kNODE_RISK;
+    if (conf["summary_feature_type"].as<string>() == "edge_risk")
+      summary_feature_type = kEDGE_RISK;
+    else if (conf["summary_feature_type"].as<string>() == "node_risk")
+      summary_feature_type = kNODE_RISK;
+    else if (conf["summary_feature_type"].as<string>() == "edge_prob")
+      summary_feature_type = kEDGE_PROB;
+    else {
+      cerr << "Bad summary_feature_type: " << conf["summary_feature_type"].as<string>() << endl;
+      abort();
+    }
+
   for (int pass = 0; pass < rescoring_passes.size(); ++pass) {
     const RescoringPass& rp = rescoring_passes[pass];
     const vector<double>& cur_weights = rp.weight_vector;
@@ -806,43 +826,61 @@ bool DecoderImpl::Decode(const string& input, DecoderObserver* o) {
     }
 
     if (rp.fid_summary) {
-#if 0
-      const prob_t z = forest.PushWeightsToGoal(1.0);
-      if (!SILENT) { cerr << "  " << passtr << " adding summary feature " << FD::Convert(rp.fid_summary) << " log(Z)=" << log(z) << endl; }
-      if (!isfinite(log(z)) || isnan(log(z))) {
-        cerr << "  " << passtr << " !!! Invalid partition detected, abandoning.\n";
-      } else {
-        for (int i = 0; i < forest.edges_.size(); ++i) {
-          const double log_prob_transition = log(forest.edges_[i].edge_prob_); // locally normalized by the edge
-                                                                               // head node by forest.PushWeightsToGoal
-          if (!isfinite(log_prob_transition) || isnan(log_prob_transition)) {
-            cerr << "Edge: i=" << i << " got bad inside prob: " << *forest.edges_[i].rule_ << endl;
-            abort();
+      if (summary_feature_type == kEDGE_PROB) {
+        const prob_t z = forest.PushWeightsToGoal(1.0);
+        if (!isfinite(log(z)) || isnan(log(z))) {
+          cerr << "  " << passtr << " !!! Invalid partition detected, abandoning.\n";
+        } else {
+          for (int i = 0; i < forest.edges_.size(); ++i) {
+            const double log_prob_transition = log(forest.edges_[i].edge_prob_); // locally normalized by the edge
+                                                                                 // head node by forest.PushWeightsToGoal
+            if (!isfinite(log_prob_transition) || isnan(log_prob_transition)) {
+              cerr << "Edge: i=" << i << " got bad inside prob: " << *forest.edges_[i].rule_ << endl;
+              abort();
+            }
+
+            forest.edges_[i].feature_values_.set_value(rp.fid_summary, log_prob_transition);
           }
-
-          forest.edges_[i].feature_values_.set_value(rp.fid_summary, log_prob_transition);
+          forest.Reweight(cur_weights);  // reset weights
         }
-        forest.Reweight(cur_weights);  // reset weights
-      }
-#endif
-      Hypergraph::EdgeProbs posts;
-      const prob_t z = forest.ComputeEdgePosteriors(1.0, &posts);
-      if (!isfinite(log(z)) || isnan(log(z))) {
-        cerr << "  " << passtr << " !!! Invalid partition detected, abandoning.\n";
-      } else {
-        for (int i = 0; i < forest.nodes_.size(); ++i) {
-          const Hypergraph::EdgesVector& in_edges = forest.nodes_[i].in_edges_;
-          prob_t node_post = prob_t(0);
-          for (int j = 0; j < in_edges.size(); ++j)
-            node_post += (posts[in_edges[j]] / z);
-          const double log_np = log(node_post);
-          if (!isfinite(log_np) || isnan(log_np)) {
-            cerr << "got bad posterior prob for node " << i << endl;
-            abort();
+      } else if (summary_feature_type == kNODE_RISK) {
+        Hypergraph::EdgeProbs posts;
+        const prob_t z = forest.ComputeEdgePosteriors(1.0, &posts);
+        if (!isfinite(log(z)) || isnan(log(z))) {
+          cerr << "  " << passtr << " !!! Invalid partition detected, abandoning.\n";
+        } else {
+          for (int i = 0; i < forest.nodes_.size(); ++i) {
+            const Hypergraph::EdgesVector& in_edges = forest.nodes_[i].in_edges_;
+            prob_t node_post = prob_t(0);
+            for (int j = 0; j < in_edges.size(); ++j)
+              node_post += (posts[in_edges[j]] / z);
+            const double log_np = log(node_post);
+            if (!isfinite(log_np) || isnan(log_np)) {
+              cerr << "got bad posterior prob for node " << i << endl;
+              abort();
+            }
+            for (int j = 0; j < in_edges.size(); ++j)
+              forest.edges_[in_edges[j]].feature_values_.set_value(rp.fid_summary, exp(log_np));
           }
-          for (int j = 0; j < in_edges.size(); ++j)
-            forest.edges_[in_edges[j]].feature_values_.set_value(rp.fid_summary, exp(log_np));
         }
+      } else if (summary_feature_type == kEDGE_RISK) {
+        Hypergraph::EdgeProbs posts;
+        const prob_t z = forest.ComputeEdgePosteriors(1.0, &posts);
+        if (!isfinite(log(z)) || isnan(log(z))) {
+          cerr << "  " << passtr << " !!! Invalid partition detected, abandoning.\n";
+        } else {
+          assert(posts.size() == forest.edges_.size());
+          for (int i = 0; i < posts.size(); ++i) {
+            const double log_np = log(posts[i] / z);
+            if (!isfinite(log_np) || isnan(log_np)) {
+              cerr << "got bad posterior prob for node " << i << endl;
+              abort();
+            }
+            forest.edges_[i].feature_values_.set_value(rp.fid_summary, exp(log_np));
+          }
+        }
+      } else {
+        assert(!"shouldn't happen");
       }
     }
 
-- 
cgit v1.2.3
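Note on the summary feature types added in the second patch: with
--summary_feature_type, the decoder writes one of three quantities into the
feature named by --summary_feature at the end of a rescoring pass. node_risk
computes each node's posterior probability (the sum of its incoming edges'
inside-outside posteriors, normalized by the partition function Z) and writes
it onto every incoming edge; edge_risk writes each edge's own normalized
posterior; edge_prob writes the log of the locally normalized edge probability
after pushing weights to the goal node.

Below is a minimal sketch of the node_risk computation only. The types
ToyForest, Node, and Edge and the function AddNodeRiskFeature are hypothetical
stand-ins invented for illustration (they are not cdec's Hypergraph API), and
edge posteriors are taken as a precomputed vector of plain doubles rather than
cdec's log-domain prob_t values. Compile with -std=c++11.

// Hypothetical stand-ins for cdec's hypergraph types; illustration only.
#include <cassert>
#include <cmath>
#include <cstdio>
#include <map>
#include <vector>

struct Node { std::vector<int> in_edges; };            // indices into edges
struct Edge { std::map<int, double> feature_values; }; // feature id -> value
struct ToyForest {
  std::vector<Node> nodes;
  std::vector<Edge> edges;
};

// Mirrors the kNODE_RISK branch: each node's posterior is the sum of its
// incoming edges' (unnormalized) posteriors divided by Z, and that value is
// written as the summary feature on every incoming edge. The patch stores
// exp(log_np), which is just the node posterior itself, so we write it
// directly here.
void AddNodeRiskFeature(ToyForest* forest,
                        const std::vector<double>& edge_posteriors, // unnormalized
                        double z,           // partition function (inside score at goal)
                        int fid_summary) {  // feature id to write
  for (size_t i = 0; i < forest->nodes.size(); ++i) {
    const std::vector<int>& in_edges = forest->nodes[i].in_edges;
    double node_post = 0;
    for (size_t j = 0; j < in_edges.size(); ++j)
      node_post += edge_posteriors[in_edges[j]] / z;
    // The patch aborts on non-finite posteriors; an assert stands in for that.
    assert(std::isfinite(std::log(node_post)));
    for (size_t j = 0; j < in_edges.size(); ++j)
      forest->edges[in_edges[j]].feature_values[fid_summary] = node_post;
  }
}

int main() {
  // Toy two-node forest: node 0 has incoming edges 0 and 1, node 1 has edge 2.
  ToyForest f;
  f.nodes.resize(2);
  f.edges.resize(3);
  f.nodes[0].in_edges = {0, 1};
  f.nodes[1].in_edges = {2};
  std::vector<double> posts = {0.25, 0.75, 1.0};  // pretend inside*outside scores
  AddNodeRiskFeature(&f, posts, /*z=*/1.0, /*fid_summary=*/7);
  std::printf("edge 0 summary feature: %g\n", f.edges[0].feature_values[7]);  // 1.0
  return 0;
}

Because all incoming edges of a node share the node's posterior, this feature
penalizes or rewards entire groups of competing edges together, which is the
design difference from edge_risk, where each edge carries its own posterior.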