author    graehl <graehl@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-27 05:00:07 +0000
committer graehl <graehl@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-27 05:00:07 +0000
commit    684db46e977bca456e02e677d22ba5e4a33ae6ce (patch)
tree      d4b249ff3f974a21a56fd0038b678cda11dd1bc0 /decoder
parent    25acffeb79a3258d978b7a3168b076f959bfb1bb (diff)
set fsa features() properly
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@433 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'decoder')
-rwxr-xr-x  decoder/ff_fsa.h      20
-rw-r--r--  decoder/ff_lm.cc       1
-rwxr-xr-x  decoder/ff_lm_fsa.h    2
3 files changed, 13 insertions, 10 deletions
diff --git a/decoder/ff_fsa.h b/decoder/ff_fsa.h
index 45837f2c..a5563511 100755
--- a/decoder/ff_fsa.h
+++ b/decoder/ff_fsa.h
@@ -66,6 +66,16 @@ usage: see ff_sample_fsa.h or ff_lm_fsa.h
template <class Impl>
struct FsaFeatureFunctionBase {
+ // CALL 1 of these MANUALLY (because feature name(s) may depend on param, it's not done in ctor)
+ void Init(std::string const& fname="") {
+ fid_=FD::Convert(fname.empty()?name():fname);
+ InitHaveFid();
+ }
+ Features features_;
+ void InitHaveFid() {
+ features_=FeatureFunction::single_feature(fid_);
+ }
+
Impl const& d() const { return static_cast<Impl const&>(*this); }
Impl & d() { return static_cast<Impl &>(*this); }
protected:
@@ -81,16 +91,6 @@ protected:
end_phrase_=singleton_sentence(single);
}
- // CALL 1 of these MANUALLY (because feature name(s) may depend on param, it's not done in ctor)
- void InitFidNamed(std::string const& fname="") {
- fid_=FD::Convert(name.empty()?name():fname);
- Init();
- }
- Features features_;
- void Init() {
- features_=FeatureFunction::single_feature(fid_);
- }
-
inline void static to_state(void *state,char const* begin,char const* end) {
std::memcpy(state,begin,end-begin);
}
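The comment on the two initializers moved above spells out the intended calling convention: call exactly one of them after construction, since the feature name (and hence fid_) may depend on the parameter string. A minimal sketch of how a derived feature might use them follows; it is not part of this commit, the ExampleFsa type and its parameter handling are invented, and it assumes the base class can be default-constructed as it is here. Only Init(), InitHaveFid(), fid_, and FD::Convert come from the code above.

#include <string>
#include "ff_fsa.h"

// Hypothetical feature for illustration only.
struct ExampleFsa : public FsaFeatureFunctionBase<ExampleFsa> {
  explicit ExampleFsa(std::string const& param) {
    if (param.empty()) {
      // Fixed feature name: one call sets fid_ from the name and fills features_.
      Init("Example");
    } else {
      // Name depends on param: compute fid_ ourselves, then only fill features_.
      // This mirrors the pattern LanguageModelFsa uses below, where a helper
      // has already set fid_ before InitHaveFid() is called.
      fid_ = FD::Convert("Example_" + param);
      InitHaveFid();
    }
  }
};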
diff --git a/decoder/ff_lm.cc b/decoder/ff_lm.cc
index 3d81a599..75778756 100644
--- a/decoder/ff_lm.cc
+++ b/decoder/ff_lm.cc
@@ -594,6 +594,7 @@ void LanguageModelFsa::set_ngram_order(int i) {
LanguageModelFsa::LanguageModelFsa(string const& param) {
int lmorder;
pimpl_ = make_lm_impl(param,&lmorder,&fid_);
+ InitHaveFid();
floor_=pimpl_->floor_;
set_ngram_order(lmorder);
}
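Here fid_ has already been filled in by make_lm_impl (which receives &fid_), so the constructor only needs InitHaveFid() to populate features_ from it; calling Init() instead would have re-derived the id from the feature name.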
diff --git a/decoder/ff_lm_fsa.h b/decoder/ff_lm_fsa.h
index c2c0972e..c1d875eb 100755
--- a/decoder/ff_lm_fsa.h
+++ b/decoder/ff_lm_fsa.h
@@ -39,6 +39,7 @@ struct LanguageModelFsa : public FsaFeatureFunctionBase<LanguageModelFsa> {
//post: [left,e] are the seen left words
return e;
}
+
template <class Accum>
void ScanPhraseAccum(SentenceMetadata const& /* smeta */,const Hypergraph::Edge&edge,WordID const* begin,WordID const* end,void const* old_st,void *new_st,Accum *a) const {
if (begin==end) return; // otherwise w/ shortening it's possible to end up with no words at all.
@@ -73,6 +74,7 @@ struct LanguageModelFsa : public FsaFeatureFunctionBase<LanguageModelFsa> {
FSALMDBGnl(edge);
Add(p,a);
}
+
template <class Accum>
void ScanAccum(SentenceMetadata const& /* smeta */,const Hypergraph::Edge& /* edge */,WordID w,void const* old_st,void *new_st,Accum *a) const {
if (!ctxlen_) {