From 4955a3d76a535fad2a3f9e504f2eeeefc0a266fd Mon Sep 17 00:00:00 2001
From: graehl
Date: Mon, 5 Jul 2010 18:53:00 +0000
Subject: comment unused var names, todo

git-svn-id: https://ws10smt.googlecode.com/svn/trunk@133 ec762483-ff6d-05da-a07a-a48fb63a330f
---
 decoder/apply_models.cc     |  6 +++++-
 decoder/bottom_up_parser.cc |  8 +++++---
 decoder/ff_lm.h             |  2 +-
 decoder/ff_wordalign.cc     | 37 +++++++++++++++++++++----------------
 decoder/hg.cc               |  2 +-
 decoder/stringlib.h         |  7 +++++++
 6 files changed, 40 insertions(+), 22 deletions(-)

(limited to 'decoder')

diff --git a/decoder/apply_models.cc b/decoder/apply_models.cc
index 2908005f..0b667de0 100644
--- a/decoder/apply_models.cc
+++ b/decoder/apply_models.cc
@@ -1,3 +1,7 @@
+//TODO: (for many nonterminals, or multi-rescoring pass) either global
+//best-first, or group by (NT,span) - use prev forest outside as a (admissable,
+//if models are a subset and weights are same) heuristic
+
 #include "apply_models.h"
 
 #include
@@ -207,7 +211,7 @@ public:
     new_edge->prev_j_ = item->out_edge_.prev_j_;
     Candidate*& o_item = (*s2n)[item->state_];
     if (!o_item) o_item = item;
-
+
     int& node_id = o_item->node_index_;
     if (node_id < 0) {
       Hypergraph::Node* new_node = out.AddNode(in.nodes_[item->in_edge_->head_node_].cat_);
diff --git a/decoder/bottom_up_parser.cc b/decoder/bottom_up_parser.cc
index dd54a606..94f209b5 100644
--- a/decoder/bottom_up_parser.cc
+++ b/decoder/bottom_up_parser.cc
@@ -1,3 +1,5 @@
+//TODO: when using many nonterminals, group passive edges for a span (treat all as a single X for the active items).
+
 #include "bottom_up_parser.h"
 
 #include
@@ -17,8 +19,8 @@ struct ParserStats {
   }
   int active_items;
   int passive_items;
-  void NotifyActive(int i, int j) { ++active_items; }
-  void NotifyPassive(int i, int j) { ++passive_items; }
+  void NotifyActive(int , int ) { ++active_items; }
+  void NotifyPassive(int , int ) { ++passive_items; }
 };
 ParserStats stats;
@@ -148,7 +150,7 @@ class ActiveChart {
  private:
   const Hypergraph* hg_;
   Array2D<vector<ActiveItem> > act_chart_;
-  const PassiveChart& psv_chart_;
+  const PassiveChart& psv_chart_;
 };
 
 PassiveChart::PassiveChart(const string& goal,
diff --git a/decoder/ff_lm.h b/decoder/ff_lm.h
index 45fc1da7..10a3e9a3 100644
--- a/decoder/ff_lm.h
+++ b/decoder/ff_lm.h
@@ -26,7 +26,7 @@ class LanguageModel : public FeatureFunction {
                                      SparseVector<double>* estimated_features,
                                      void* out_context) const;
  private:
-  const int fid_;
+  int fid_; // conceptually const; mutable only to simplify constructor
   mutable LanguageModelImpl* pimpl_;
 };
 
diff --git a/decoder/ff_wordalign.cc b/decoder/ff_wordalign.cc
index 669aa530..0ba2bf92 100644
--- a/decoder/ff_wordalign.cc
+++ b/decoder/ff_wordalign.cc
@@ -17,7 +17,7 @@ static const int MAX_SENTENCE_SIZE = 100;
 
 using namespace std;
 
-Model2BinaryFeatures::Model2BinaryFeatures(const string& param) :
+Model2BinaryFeatures::Model2BinaryFeatures(const string& ) :
   fids_(boost::extents[MAX_SENTENCE_SIZE][MAX_SENTENCE_SIZE][MAX_SENTENCE_SIZE]) {
   for (int i = 1; i < MAX_SENTENCE_SIZE; ++i) {
     for (int j = 0; j < i; ++j) {
@@ -36,10 +36,12 @@ Model2BinaryFeatures::Model2BinaryFeatures(const string& param) :
 
 void Model2BinaryFeatures::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                                  const Hypergraph::Edge& edge,
-                                                 const vector<const void*>& ant_states,
+                                                 const vector<const void*>& /*ant_states*/,
                                                  SparseVector<double>* features,
-                                                 SparseVector<double>* estimated_features,
-                                                 void* state) const {
+                                                 SparseVector<double>* // estimated_features
+                                                 ,
+                                                 void* // state
+                                                 ) const {
   // if the source word is either null or the generated word
   // has no position in the reference
   if (edge.i_ == -1 || edge.prev_i_ == -1)
@@ -82,10 +84,13 @@ RelativeSentencePosition::RelativeSentencePosition(const string& param) :
 
 void RelativeSentencePosition::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                                      const Hypergraph::Edge& edge,
-                                                     const vector<const void*>& ant_states,
+                                                     const vector<const void*>& // ant_states
+                                                     ,
                                                      SparseVector<double>* features,
-                                                     SparseVector<double>* estimated_features,
-                                                     void* state) const {
+                                                     SparseVector<double>* // estimated_features
+                                                     ,
+                                                     void* // state
+                                                     ) const {
   // if the source word is either null or the generated word
   // has no position in the reference
   if (edge.i_ == -1 || edge.prev_i_ == -1)
@@ -159,7 +164,7 @@ void MarkovJumpFClass::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                              const Hypergraph::Edge& edge,
                                              const std::vector<const void*>& ant_states,
                                              SparseVector<double>* features,
-                                             SparseVector<double>* estimated_features,
+                                             SparseVector<double>* /* estimated_features */,
                                              void* state) const {
   unsigned char& dpstate = *((unsigned char*)state);
   if (edge.Arity() == 0) {
@@ -215,7 +220,7 @@ void MarkovJump::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                        const Hypergraph::Edge& edge,
                                        const vector<const void*>& ant_states,
                                        SparseVector<double>* features,
-                                       SparseVector<double>* estimated_features,
+                                       SparseVector<double>* /* estimated_features */,
                                        void* state) const {
   unsigned char& dpstate = *((unsigned char*)state);
   const int flen = smeta.GetSourceLength();
@@ -305,7 +310,7 @@ void SourcePOSBigram::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                             const Hypergraph::Edge& edge,
                                             const std::vector<const void*>& ant_contexts,
                                             SparseVector<double>* features,
-                                            SparseVector<double>* estimated_features,
+                                            SparseVector<double>* /* estimated_features */,
                                             void* context) const {
   WordID& out_context = *static_cast<WordID*>(context);
   int& out_word_count = *(static_cast<int*>(context) + 1);
@@ -347,7 +352,7 @@ AlignerResults::AlignerResults(const std::string& param) :
   while(in) {
     string line;
     getline(in, line);
-    if (!in) break;
+    if (!in) break;
     ++lc;
     is_aligned_.push_back(AlignerTools::ReadPharaohAlignmentGrid(line));
   }
@@ -356,10 +361,10 @@ void AlignerResults::TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                            const Hypergraph::Edge& edge,
-                                           const vector<const void*>& ant_states,
+                                           const vector<const void*>& /* ant_states */,
                                            SparseVector<double>* features,
-                                           SparseVector<double>* estimated_features,
-                                           void* state) const {
+                                           SparseVector<double>* /* estimated_features */,
+                                           void* /* state */) const {
   if (edge.i_ == -1 || edge.prev_i_ == -1)
     return;
@@ -389,7 +394,7 @@ BlunsomSynchronousParseHack::BlunsomSynchronousParseHack(const string& param) :
   while(in) {
     string line;
     getline(in, line);
-    if (!in) break;
+    if (!in) break;
     ++lc;
     refs_.push_back(vector<WordID>());
     TD::ConvertSentence(line, &refs_.back());
   }
@@ -401,7 +406,7 @@ void BlunsomSynchronousParseHack::TraversalFeaturesImpl(const SentenceMetadata&
                                                         const Hypergraph::Edge& edge,
                                                         const vector<const void*>& ant_states,
                                                         SparseVector<double>* features,
-                                                        SparseVector<double>* estimated_features,
+                                                        SparseVector<double>* /* estimated_features */,
                                                         void* state) const {
   if (cur_sent_ != smeta.GetSentenceID()) {
     // assert(smeta.HasReference());
diff --git a/decoder/hg.cc b/decoder/hg.cc
index e57f7807..70511c07 100644
--- a/decoder/hg.cc
+++ b/decoder/hg.cc
@@ -1,4 +1,4 @@
-//TODO: lazily generate feature vectors for hyperarcs (because some of them will be pruned). this means 1) storing ref to rule for those features 2) providing ff interface for regenerating its feature vector from hyperedge+states and probably 3) still caching feat. vect on hyperedge once it's been generated. ff would normally just contribute its weighted score and result state, not component features.
+//TODO: lazily generate feature vectors for hyperarcs (because some of them will be pruned). this means 1) storing ref to rule for those features 2) providing ff interface for regenerating its feature vector from hyperedge+states and probably 3) still caching feat. vect on hyperedge once it's been generated. ff would normally just contribute its weighted score and result state, not component features. however, the hypergraph drops the state used by ffs after rescoring is done, so recomputation would have to start at the leaves and work bottom up. question: which takes more space, feature id+value, or state?
 
 #include "hg.h"
 
diff --git a/decoder/stringlib.h b/decoder/stringlib.h
index 22863945..eac1dce6 100644
--- a/decoder/stringlib.h
+++ b/decoder/stringlib.h
@@ -80,6 +80,13 @@ inline int SplitOnWhitespace(const std::string& in, std::vector<std::string>* ou
   return out->size();
 }
 
+inline std::vector<std::string> SplitOnWhitespace(std::string const& in)
+{
+  std::vector<std::string> r;
+  SplitOnWhitespace(in,&r);
+  return r;
+}
+
 inline void SplitCommandAndParam(const std::string& in, std::string* cmd, std::string* param) {
   cmd->clear();
   param->clear();
--
cgit v1.2.3
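
Note: the only new function in this commit is the by-value SplitOnWhitespace overload added to decoder/stringlib.h; the remaining hunks comment out unused parameter names and extend TODO notes. A minimal usage sketch of the new overload follows, assuming decoder/stringlib.h is on the include path; the caller, file, and sample input below are hypothetical and are not part of the commit.

#include <iostream>
#include <string>
#include <vector>

#include "stringlib.h"  // decoder/stringlib.h, as modified by this patch

int main() {
  const std::string line = "the   quick  brown fox";  // hypothetical sample input

  // Pre-existing pointer-style API: caller supplies the output vector.
  std::vector<std::string> toks;
  SplitOnWhitespace(line, &toks);

  // New convenience overload from this commit: returns the tokens by value,
  // so the call can sit directly in an initializer or expression.
  const std::vector<std::string> toks2 = SplitOnWhitespace(line);

  for (size_t i = 0; i < toks2.size(); ++i)
    std::cout << i << '\t' << toks2[i] << '\n';
  return 0;
}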