author | graehl <graehl@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-21 22:09:50 +0000
committer | graehl <graehl@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-21 22:09:50 +0000
commit | 8700e2ee96a71ed267617ce1ebd4ef3a002a1f6c (patch)
tree | fa4bb7fd70fbd422a3a7a9564fa8ba86aff2797f /decoder
parent | c9416968b391f10891733a00cc57bda27b4d323d (diff)
disabled TD reserved stuff - debug init assertion later
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@364 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'decoder')
-rw-r--r-- | decoder/ff.h | 24
-rwxr-xr-x | decoder/ff_fsa.h | 79
-rw-r--r-- | decoder/sparse_vector.h | 21
-rw-r--r-- | decoder/tdict.cc | 5
-rw-r--r-- | decoder/tdict.h | 8
5 files changed, 110 insertions, 27 deletions
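Reading note on the ff.h hunks below: the traversal-feature signatures switch from SparseVector<double> to FeatureVector. The typedef itself is not part of this diff; presumably it is (or is equivalent to) something like the sketch below, in which case the change is a rename rather than a change in behavior. The FD::Convert and set_value calls mirror usage that appears in the ff_fsa.h hunk; the feature name and helper are made up for illustration.

#include "sparse_vector.h"
#include "fdict.h"

// Assumed typedef (not shown in this diff): a sparse map from feature id to value.
typedef SparseVector<double> FeatureVector;

// Typical fill pattern matching the TraversalFeaturesImpl signatures below
// (hypothetical feature name and helper):
inline void ExampleFill(FeatureVector* features) {
  int fid = FD::Convert("ExampleFeature"); // feature name -> id, as in InitFid() below
  features->set_value(fid, 1.0);
}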
diff --git a/decoder/ff.h b/decoder/ff.h
index 2cf96d39..2b7c7fec 100644
--- a/decoder/ff.h
+++ b/decoder/ff.h
@@ -46,8 +46,8 @@ public:
   inline void TraversalFeatures(const SentenceMetadata& smeta,
                                 const Hypergraph::Edge& edge,
                                 const std::vector<const void*>& ant_contexts,
-                                SparseVector<double>* features,
-                                SparseVector<double>* estimated_features,
+                                FeatureVector* features,
+                                FeatureVector* estimated_features,
                                 void* out_state) const {
     TraversalFeaturesImpl(smeta, edge, ant_contexts,
                           features, estimated_features, out_state);
@@ -62,7 +62,7 @@ public:
   // it here.  For example, the language model computes the cost of adding
   // <s> and </s>.
   virtual void FinalTraversalFeatures(const void* residual_state,
-                                      SparseVector<double>* final_features) const;
+                                      FeatureVector* final_features) const;

 protected:
   // context is a pointer to a buffer of size NumBytesContext() that the
@@ -75,15 +75,15 @@ public:
   virtual void TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                      const Hypergraph::Edge& edge,
                                      const std::vector<const void*>& ant_contexts,
-                                     SparseVector<double>* features,
-                                     SparseVector<double>* estimated_features,
+                                     FeatureVector* features,
+                                     FeatureVector* estimated_features,
                                      void* context) const = 0;

   // !!! ONLY call this from subclass *CONSTRUCTORS* !!!
   void SetStateSize(size_t state_size) {
     state_size_ = state_size;
   }
-
+  int StateSize() const { return state_size_; }
 private:
   int state_size_;
 };
@@ -102,8 +102,8 @@ class WordPenalty : public FeatureFunction {
   virtual void TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                      const Hypergraph::Edge& edge,
                                      const std::vector<const void*>& ant_contexts,
-                                     SparseVector<double>* features,
-                                     SparseVector<double>* estimated_features,
+                                     FeatureVector* features,
+                                     FeatureVector* estimated_features,
                                      void* context) const;
 private:
   const int fid_;
@@ -122,8 +122,8 @@ class SourceWordPenalty : public FeatureFunction {
   virtual void TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                      const Hypergraph::Edge& edge,
                                      const std::vector<const void*>& ant_contexts,
-                                     SparseVector<double>* features,
-                                     SparseVector<double>* estimated_features,
+                                     FeatureVector* features,
+                                     FeatureVector* estimated_features,
                                      void* context) const;
 private:
   const int fid_;
@@ -148,8 +148,8 @@ class ArityPenalty : public FeatureFunction {
   virtual void TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                      const Hypergraph::Edge& edge,
                                      const std::vector<const void*>& ant_contexts,
-                                     SparseVector<double>* features,
-                                     SparseVector<double>* estimated_features,
+                                     FeatureVector* features,
+                                     FeatureVector* estimated_features,
                                      void* context) const;
 private:
   std::vector<WordID> fids_;
diff --git a/decoder/ff_fsa.h b/decoder/ff_fsa.h
index ed159853..a14f9913 100755
--- a/decoder/ff_fsa.h
+++ b/decoder/ff_fsa.h
@@ -1,49 +1,110 @@
 #ifndef FF_FSA_H
 #define FF_FSA_H

+//TODO: actually compile this; probably full of syntax errors.
+
 #include <stdint.h> //C99
 #include <string>
 #include "ff.h"
 #include "sparse_vector.h"
-#include "value_array.h"
+#include "value_array.h" // used to hold state
 #include "tdict.h"
+#include "hg.h"

 typedef ValueArray<uint8_t> Bytes;

 /*
+  features whose score is just some PFSA over target string.  TODO: could decide to give access to source span of scanned words as well if someone devises a feature that can use it
+
+  state is some fixed width byte array.  could actually be a void *, WordID sequence, whatever.
 */
-struct FsaFeatureFunction {
-  std::string name;
+// it's not necessary to inherit from this.
+struct FsaFeatureFunctionBase {
+  std::string name,usage_short,usage_verbose;
+  int fid; // you can have more than 1 feature of course.
+  void InitFid() { // call this, though, if you have a single feature
+    fid=FD::Convert(name);
+  }
+  std::string usage(bool param,bool verbose) {
+    return FeatureFunction::usage_helper(name,usage_short,usage_verbose,param,verbose);
+  }
+
+  FsaFeatureFunctionBase(std::string const& name,std::string const& usage_verbose="[no documentation yet]",std::string const& usage_short="[no parameters]") : name(name),usage_short(usage_short),usage_verbose(usage_verbose) {  }
+
+  int state_bytes; // don't forget to set this (it may depend on params of course)
+};
+
+// example: feature val = -1 * # of target words
+struct TargetPenaltyFsa : public FsaFeatureFunctionBase {
+  TargetPenaltyFsa(std::string const& param) : FsaFeatureFunctionBase("TargetPenalty","","-1 per target word") { InitFid(); }
+  const float val_per_target_word=-1;

   // state for backoff

   // scan
+  void Scan(SentenceMetadata const& smeta,WordID x,void const* prev_state,FeatureVector *features) {
+    features->set_value(fid,val_per_target_word);
+  }

-  // heuristic
+  // heuristic estimate of phrase
+  void Heuristic(WordID const* begin, WordID const* end,FeatureVector *h_features)

-  // all strings x of this length must end in the same state
-  virtual int MarkovOrder() const {
+  // return m: all strings x with the same final m+1 letters must end in this state
+  /* markov chain of order m: P(xn|xn-1...x1)=P(xn|xn-1...xn-m) */
+  int MarkovOrder() const {
     return 0;
   }
+};
+
+//TODO: combine 2 FsaFeatures typelist style (can recurse for more)
+// the type-erased interface
+struct FsaFeatureFunction {
+  virtual int MarkovOrder() const = 0;
+  virtual ~FsaFeatureFunction();
+
+};
+
+// conforming to above interface, type erases FsaImpl
+// you might be wondering: why do this?  answer: it's cool, and it means that the bottom-up ff over ff_fsa wrapper doesn't go through multiple layers of dynamic dispatch
+template <class Impl>
+struct FsaFeatureFunctionDynamic : public FsaFeatureFunction {
+  Impl& d() { return static_cast<Impl&>(*this); }
+  Impl const& d() { return static_cast<Impl const&>(*this); }
+  int MarkovOrder() const { return d().MarkovOrder(); }
 };

+//TODO: combine 2 (or N) FsaFeatureFunction (type erased)
+
+/* regular bottom up scorer from Fsa feature
+   uses guarantee about markov order=N to score ASAP
+   encoding of state: if less than N-1 (ctxlen) words
+
+   either:
+   struct FF : public FsaImpl,FeatureFunctionFromFsa<FF> (more efficient)
+
+   or:
+   struct FF : public FsaFeatureFunctionDynamic,FeatureFunctionFromFsa<FF> (code sharing, but double dynamic dispatch)
+ */
-// regular bottom up scorer from Fsa feature
 template <class Impl>
 struct FeatureFunctionFromFsa : public FeatureFunction {
   Impl& d() { return static_cast<Impl&>(*this); }
   Impl const& d() { return static_cast<Impl const&>(*this); }
-
+  int M; // markov order (ctx len)
   FeatureFunctionFromFsa() {  }
   Init() {
     name=d().name;
-    SetStateSize(sizeof(WordID)*2*MarkovOrder
+    M=d().MarkovOrder
+    SetStateSize(sizeof(WordID)*2*M);
   } // can't do this in constructor because we come before d() in order

   virtual Features Features() const { return d().Features(); }
+  bool rule_feature() const {
+    return StateSize()==0; // Fsa features don't get info about span
+  }
 };
diff --git a/decoder/sparse_vector.h b/decoder/sparse_vector.h
index 9894d662..b42e001a 100644
--- a/decoder/sparse_vector.h
+++ b/decoder/sparse_vector.h
@@ -332,7 +332,7 @@ class SparseVectorList {
     int c=0;
     for (;i<end;++i,++c) {
       if (*i!=z)
-        p.push_back(pair_type(c,*i));
+        p.push_back(Pair(c,*i));
     }
     p.compact();
   }
@@ -341,10 +341,27 @@ class SparseVectorList {
     for (unsigned i=0;i<v.size();++i) {
       T const& t=v[i];
       if (t!=z)
-        p.push_back(pair_type(i,t));
+        p.push_back(Pair(i,t));
     }
     p.compact();
   }
+  // unlike SparseVector, this doesn't overwrite - but conversion to SparseVector will use last value, which is the same
+  void set_value(int i,T const& val) {
+    p.push_back(Pair(i,val));
+  }
+  void overlay(SparseVector<T> *to) const {
+    for (int i=0;i<p.size();++i)
+      to->set_value(p[i].first,p[i].second);
+  }
+  void copy_to(SparseVector<T> *to) const {
+    to->clear();
+    overlay(to);
+  }
+  SparseVector<T> sparse() const {
+    SparseVector<T> r;
+    copy_to(r);
+    return r;
+  }
 private:
   List p;
 };
diff --git a/decoder/tdict.cc b/decoder/tdict.cc
index 04b82c51..6794bc79 100644
--- a/decoder/tdict.cc
+++ b/decoder/tdict.cc
@@ -24,7 +24,7 @@ inline void pad(std::string const& pre,int base,int e) {
     o.str(pre);
     o<<(i-base)<<'>';
     WordID id=TD::Convert(o.str());
-    assert(id==i);
+    assert(id==i); // this fails.  why?
   }
 }

@@ -32,6 +32,8 @@ inline void pad(std::string const& pre,int base,int e) {
 namespace {
 struct TD_init {
   TD_init() {
+    /*
+    // disabled for now since it's breaking trunk
     assert(TD::Convert(TD::ss_str)==TD::ss);
     assert(TD::Convert(TD::se_str)==TD::se);
     assert(TD::Convert(TD::unk_str)==TD::unk);
@@ -41,6 +43,7 @@ struct TD_init {
     int reserved_end=TD::begin();
     pad("<RESERVED",TD::end(),reserved_end);
     assert(TD::end()==reserved_end);
+    */
   }
 };
diff --git a/decoder/tdict.h b/decoder/tdict.h
index 26e94edf..e0b4b5f0 100644
--- a/decoder/tdict.h
+++ b/decoder/tdict.h
@@ -9,21 +9,23 @@ class Vocab;

 struct TD {
+  /* // disabled for now
   static const int reserved_begin=10; // allow room for SRI special tokens e.g. unk ss se pause.  tokens until this get "<FILLERi>"
   static const int n_reserved=10; // 0...n_reserved-1 get token '<RESERVEDi>'
   static inline WordID reserved(int i) {
     assert(i>=0 && i<n_reserved);
     return (WordID)(reserved_begin+i);
   }
+  static inline WordID begin() {
+    return reserved(n_reserved);
+  }
+  */
   static const WordID max_wordid=0x7fffffff;
   static const WordID none=(WordID)-1; // Vocab_None
   static char const* const ss_str; //="<s>";
   static char const* const se_str; //="</s>";
   static char const* const unk_str; //="<unk>";
   static WordID ss,se,unk; // x=Convert(x_str)
-  static inline WordID begin() {
-    return reserved(n_reserved);
-  }
   static WordID end(); // next id to be assigned; [begin,end) give the non-reserved tokens seen so far
   static Vocab dict_;
   static void ConvertSentence(std::string const& sent, std::vector<WordID>* ids);
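The comment block in the ff_fsa.h hunk contrasts two ways of wiring a concrete FSA feature into the bottom-up machinery: deriving through FeatureFunctionFromFsa<FF> (CRTP, so the wrapper's calls into the feature resolve statically) versus going through the type-erased FsaFeatureFunction interface (an extra layer of virtual dispatch, but shared code). A minimal standalone sketch of that trade-off, using hypothetical names rather than the decoder's actual classes:

#include <iostream>

// Type-erased interface: every query is a virtual call (cf. FsaFeatureFunction).
struct FsaInterface {
  virtual int MarkovOrder() const = 0;
  virtual ~FsaInterface() {}
};

// CRTP wrapper: calls straight into the derived class, so the wrapper's use of
// MarkovOrder() needs no virtual dispatch (cf. FeatureFunctionFromFsa<Impl>).
template <class Impl>
struct FromFsa {
  Impl const& d() const { return static_cast<Impl const&>(*this); }
  // state = M words of left context + M words of right context
  int StateWords() const { return 2 * d().MarkovOrder(); }
};

// One concrete feature usable both ways, mirroring the comment's
// "struct FF : public FsaImpl,FeatureFunctionFromFsa<FF>" pattern.
struct BigramishFeature : FsaInterface, FromFsa<BigramishFeature> {
  int MarkovOrder() const { return 1; }
};

int main() {
  BigramishFeature f;
  FsaInterface* erased = &f;                  // dynamic-dispatch path
  std::cout << erased->MarkovOrder() << "\n"; // 1
  std::cout << f.StateWords() << "\n";        // 2, resolved through the CRTP wrapper
  return 0;
}

The diff's FeatureFunctionFromFsa does the analogous thing: Init() caches M=d().MarkovOrder and sizes the packed state to sizeof(WordID)*2*M, i.e. two context windows of M words, which is what StateWords() stands in for above.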