author    | Chris Dyer <cdyer@cs.cmu.edu> | 2011-04-22 13:38:32 -0400
committer | Chris Dyer <cdyer@cs.cmu.edu> | 2011-04-22 13:38:32 -0400
commit    | 9986bc8fe11435b2134350bba24aa0139d2665ac (patch)
tree      | d27b526d929b788dcdc2b1ab949c608a4751edfe /decoder
parent    | 5061409a26050b9a4724b50b30d66d3c3a583951 (diff)
make compatible with FastSparseVector
Diffstat (limited to 'decoder')
-rw-r--r-- | decoder/aligner.cc         |  2
-rw-r--r-- | decoder/cdec_ff.cc         |  2
-rw-r--r-- | decoder/dwarf.cc           |  2
-rw-r--r-- | decoder/dwarf.h            |  2
-rwxr-xr-x | decoder/feature_accum.h    | 10
-rwxr-xr-x | decoder/ff_from_fsa.h      |  2
-rw-r--r-- | decoder/hg.cc              |  4
-rw-r--r-- | decoder/scfg_translator.cc |  2
-rw-r--r-- | decoder/trule.h            |  2
9 files changed, 15 insertions, 13 deletions
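The recurring edit in the diff below replaces indexed reads such as `posts[i]`, `pv.get(i)`, `scores_.get(i)`, and `feature_values_[fid]` with the `value()` accessor, which the diff treats as the read-only lookup shared by SparseVector and FastSparseVector. The sketch below is a hypothetical `ToySparseVector`, not cdec's actual classes; it only illustrates why a non-mutating, const-callable read matters for these call sites.

```cpp
#include <iostream>
#include <map>

// Hypothetical stand-in used only to illustrate the operator[] vs. value()
// distinction; it is not cdec's SparseVector or FastSparseVector.
struct ToySparseVector {
  std::map<int, double> data_;

  // operator[] inserts a zero entry when the key is missing, so it cannot be
  // called on a const object and it mutates the container even on a "read".
  double& operator[](int fid) { return data_[fid]; }

  // value() is a pure read: a missing key reports 0.0 and nothing is inserted.
  double value(int fid) const {
    std::map<int, double>::const_iterator it = data_.find(fid);
    return it == data_.end() ? 0.0 : it->second;
  }
};

int main() {
  ToySparseVector v;
  v[3] = 2.5;                          // insertion via operator[]
  std::cout << v.value(3) << "\n";     // 2.5
  std::cout << v.value(7) << "\n";     // 0.0, and no entry for 7 is created
  std::cout << v.data_.size() << "\n"; // still 1
  return 0;
}
```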
```diff
diff --git a/decoder/aligner.cc b/decoder/aligner.cc
index 7830b955..292ee123 100644
--- a/decoder/aligner.cc
+++ b/decoder/aligner.cc
@@ -250,7 +250,7 @@ void AlignerTools::WriteAlignment(const Lattice& src_lattice,
   SparseVector<prob_t> posts;
   const prob_t z = InsideOutside<prob_t, EdgeProb, SparseVector<prob_t>, TransitionEventWeightFunction>(*g, &posts);
   for (int i = 0; i < edge_posteriors.size(); ++i)
-    edge_posteriors[i] = posts[i] / z;
+    edge_posteriors[i] = posts.value(i) / z;
   }
   vector<set<int> > src_cov(g->edges_.size());
   vector<set<int> > trg_cov(g->edges_.size());
diff --git a/decoder/cdec_ff.cc b/decoder/cdec_ff.cc
index 7ec54a5a..cbcb0fcd 100644
--- a/decoder/cdec_ff.cc
+++ b/decoder/cdec_ff.cc
@@ -1,6 +1,7 @@
 #include <boost/shared_ptr.hpp>
 
 #include "ff.h"
+#include "ff_identity.h"
 #include "ff_spans.h"
 #include "ff_lm.h"
 #include "ff_klm.h"
@@ -70,6 +71,7 @@ void register_feature_functions() {
   ff_registry.Register("LexicalPairIndicator", new FFFactory<LexicalPairIndicator>);
   ff_registry.Register("OutputIndicator", new FFFactory<OutputIndicator>);
   ff_registry.Register("IdentityCycleDetector", new FFFactory<IdentityCycleDetector>);
+  ff_registry.Register("FEIdentity", new FFFactory<FEIdentity>);
   ff_registry.Register("InputIndicator", new FFFactory<InputIndicator>);
   ff_registry.Register("LexicalTranslationTrigger", new FFFactory<LexicalTranslationTrigger>);
   ff_registry.Register("WordPairFeatures", new FFFactory<WordPairFeatures>);
diff --git a/decoder/dwarf.cc b/decoder/dwarf.cc
index 7968fee2..fb0404a6 100644
--- a/decoder/dwarf.cc
+++ b/decoder/dwarf.cc
@@ -3064,7 +3064,7 @@ void Alignment::SetCurrAlVector() {
   }
 }
 
-const void CountTable::print() {
+void CountTable::print() const {
   cerr << "+++ Model +++" << endl;
   for (map<WordID,int*>::const_iterator iter=model.begin(); iter!=model.end(); iter++) {
     cerr << TD::Convert(iter->first) << " ";
diff --git a/decoder/dwarf.h b/decoder/dwarf.h
index 83a0cae9..49d2a3b7 100644
--- a/decoder/dwarf.h
+++ b/decoder/dwarf.h
@@ -27,7 +27,7 @@ public:
   map<WordID,int*> model;
   int mode;
   int numColumn;
-  const void print();
+  void print() const;
   void setup(int _numcolumn, int _mode) {
     mode = _mode; numColumn = _numcolumn;
   }
diff --git a/decoder/feature_accum.h b/decoder/feature_accum.h
index 851b29db..4b8338eb 100755
--- a/decoder/feature_accum.h
+++ b/decoder/feature_accum.h
@@ -7,15 +7,15 @@
 
 struct SparseFeatureAccumulator : public FeatureVector {
   typedef FeatureVector State;
-  SparseFeatureAccumulator() { }
+  SparseFeatureAccumulator() { assert(!"this code is disabled"); }
   template <class FF>
   FeatureVector const& describe(FF const& ) { return *this; }
   void Store(FeatureVector *fv) const {
-    fv->set_from(*this);
+//NO    fv->set_from(*this);
   }
   template <class FF>
   void Store(FF const& /* ff */,FeatureVector *fv) const {
-    fv->set_from(*this);
+//NO    fv->set_from(*this);
   }
   template <class FF>
   void Add(FF const& /* ff */,FeatureVector const& fv) {
@@ -33,10 +33,10 @@ struct SparseFeatureAccumulator : public FeatureVector {
   }
 */
   void Add(int i,Featval v) {
-    (*this)[i]+=v;
+//NO    (*this)[i]+=v;
   }
   void Add(Features const& fids,int i,Featval v) {
-    (*this)[i]+=v;
+//NO    (*this)[i]+=v;
   }
 };
 
diff --git a/decoder/ff_from_fsa.h b/decoder/ff_from_fsa.h
index f2db8a4b..f8d79e03 100755
--- a/decoder/ff_from_fsa.h
+++ b/decoder/ff_from_fsa.h
@@ -295,7 +295,7 @@ private:
 # include "ff_sample_fsa.h"
 int main() {
   std::cerr<<"Testing left_end...\n";
-  std::cerr<<"sizeof(FeatureVector)="<<sizeof(FeatureVector)<<"\nsizeof(FeatureVectorList)="<<sizeof(FeatureVectorList)<<"\n";
+  std::cerr<<"sizeof(FeatureVector)="<<sizeof(FeatureVector)<<"\n";
   WordPenaltyFromFsa::test();
   return 0;
 }
diff --git a/decoder/hg.cc b/decoder/hg.cc
index a4028b0e..3ad17f1a 100644
--- a/decoder/hg.cc
+++ b/decoder/hg.cc
@@ -163,7 +163,7 @@ prob_t Hypergraph::ComputeEdgePosteriors(double scale, vector<prob_t>* posts) co
                      ScaledTransitionEventWeightFunction>(*this, &pv, weight, w2);
   posts->resize(edges_.size());
   for (int i = 0; i < edges_.size(); ++i)
-    (*posts)[i] = prob_t(pv.get(i));
+    (*posts)[i] = prob_t(pv.value(i));
   return prob_t(inside);
 }
 
@@ -176,7 +176,7 @@ prob_t Hypergraph::ComputeBestPathThroughEdges(vector<prob_t>* post) const {
                      ViterbiTransitionEventWeightFunction>(*this, &pv);
   post->resize(edges_.size());
   for (int i = 0; i < edges_.size(); ++i)
-    (*post)[i] = pv.get(i).v_;
+    (*post)[i] = pv.value(i).v_;
   return viterbi_weight.v_;
 }
 
diff --git a/decoder/scfg_translator.cc b/decoder/scfg_translator.cc
index 0f7e40bd..d4bec40d 100644
--- a/decoder/scfg_translator.cc
+++ b/decoder/scfg_translator.cc
@@ -226,7 +226,7 @@ struct SCFGTranslatorImpl {
       new_edge->j_ = edge.j_;
      new_edge->feature_values_ = fine_rule_ptr->GetFeatureValues();
       new_edge->feature_values_.set_value(FD::Convert("LatticeCost"),
-        edge.feature_values_[FD::Convert("LatticeCost")]);
+        edge.feature_values_.value(FD::Convert("LatticeCost")));
       Hypergraph::Node* head_node;
       Split2Node::iterator it = s2n.find(StateSplit(node.id_, cat));
       if (it == s2n.end()){
diff --git a/decoder/trule.h b/decoder/trule.h
index 7cd24bc9..4df4ec90 100644
--- a/decoder/trule.h
+++ b/decoder/trule.h
@@ -132,7 +132,7 @@ class TRule {
   int Arity() const { return arity_; }
   bool IsUnary() const { return (Arity() == 1) && (f_.size() == 1); }
   const SparseVector<double>& GetFeatureValues() const { return scores_; }
-  double Score(int i) const { return scores_.get(i); }
+  double Score(int i) const { return scores_.value(i); }
   WordID GetLHS() const { return lhs_; }
   void ComputeArity();
 
```