summaryrefslogtreecommitdiff
path: root/decoder
diff options
context:
space:
mode:
authorKenneth Heafield <kenlm@kheafield.com>2011-09-24 11:33:22 -0400
committerKenneth Heafield <kenlm@kheafield.com>2011-09-24 11:33:22 -0400
commit4e225d86f5c0511c8e7fab42587e54041904d9a9 (patch)
treeccfde47038d8e987916c93f33d8dc3e729bc0bc2 /decoder
parent23fc955f722906ac927df04106c1f0474ba8ca2d (diff)
Chris says remnant and oovs should not be null, so stop checking. Also, we were not properly doing ZeroRemaining, sorry.
Diffstat (limited to 'decoder')
-rw-r--r--decoder/ff_klm.cc10
1 file changed, 4 insertions, 6 deletions
diff --git a/decoder/ff_klm.cc b/decoder/ff_klm.cc
index 3b2113ad..6d9aca54 100644
--- a/decoder/ff_klm.cc
+++ b/decoder/ff_klm.cc
@@ -92,10 +92,9 @@ class KLanguageModelImpl {
public:
double LookupWords(const TRule& rule, const vector<const void*>& ant_states, double* oovs, void* remnant) {
- if (oovs) *oovs = 0;
+ *oovs = 0;
const vector<WordID>& e = rule.e();
- lm::ngram::ChartState state;
- lm::ngram::RuleScore<Model> ruleScore(*ngram_, remnant ? *static_cast<lm::ngram::ChartState*>(remnant) : state);
+ lm::ngram::RuleScore<Model> ruleScore(*ngram_, *static_cast<lm::ngram::ChartState*>(remnant));
unsigned i = 0;
if (e.size()) {
if (e[i] == kCDEC_SOS) {
@@ -115,13 +114,12 @@ class KLanguageModelImpl {
const WordID cdec_word_or_class = ClassifyWordIfNecessary(e[i]); // in future,
// maybe handle emission
const lm::WordIndex cur_word = MapWord(cdec_word_or_class); // map to LM's id
- const bool is_oov = (cur_word == 0);
- if (is_oov && oovs) (*oovs) += 1.0;
+ if (cur_word == 0) (*oovs) += 1.0;
ruleScore.Terminal(cur_word);
}
}
double ret = ruleScore.Finish();
- state.ZeroRemaining();
+ static_cast<lm::ngram::ChartState*>(remnant)->ZeroRemaining();
return ret;
}