author     Chris Dyer <cdyer@cs.cmu.edu>    2011-02-10 20:51:47 -0500
committer  Chris Dyer <cdyer@cs.cmu.edu>    2011-02-10 20:51:47 -0500
commit     3b9de9297b69bc74c1dd9d76d59253667ff58cc5 (patch)
tree       f46799f05846a3d1dd63222abf1d725300c23f48 /klm/lm/search_hashed.cc
parent     9a695967a5e4efc987b61bb3df90c0558c678512 (diff)
kenlm fix
Diffstat (limited to 'klm/lm/search_hashed.cc')
-rw-r--r--  klm/lm/search_hashed.cc  |  5
1 file changed, 3 insertions, 2 deletions
diff --git a/klm/lm/search_hashed.cc b/klm/lm/search_hashed.cc
index 00d03f4e..f97ec790 100644
--- a/klm/lm/search_hashed.cc
+++ b/klm/lm/search_hashed.cc
@@ -95,13 +95,14 @@ template <class MiddleT, class LongestT> template <class Voc> void TemplateHashe
       ReadNGrams(f, n, counts[n-1], vocab, middle, ActivateLowerMiddle<Middle>(middle[n-3]), middle[n-2]);
     }
     if (counts.size() > 2) {
-      ReadNGrams(f, counts.size(), counts[counts.size() - 1], vocab, middle, ActivateUnigram(unigram.Raw()), longest);
-    } else {
       ReadNGrams(f, counts.size(), counts[counts.size() - 1], vocab, middle, ActivateLowerMiddle<Middle>(middle.back()), longest);
+    } else {
+      ReadNGrams(f, counts.size(), counts[counts.size() - 1], vocab, middle, ActivateUnigram(unigram.Raw()), longest);
     }
   } catch (util::ProbingSizeException &e) {
     UTIL_THROW(util::ProbingSizeException, "Avoid pruning n-grams like \"bar baz quux\" when \"foo bar baz quux\" is still in the model. KenLM will work when this pruning happens, but the probing model assumes these events are rare enough that using blank space in the probing hash table will cover all of them. Increase probing_multiplier (-p to build_binary) to add more blank spaces.\n");
   }
+  ReadEnd(f);
 }
 template void TemplateHashedSearch<ProbingHashedSearch::Middle, ProbingHashedSearch::Longest>::InitializeFromARPA(const char *, util::FilePiece &f, const std::vector<uint64_t> &counts, const Config &, ProbingVocabulary &vocab, Backing &backing);
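For context: the change swaps the two ReadNGrams branches so that, for models of order 3 or higher, the highest-order n-grams are activated through the last middle order (ActivateLowerMiddle<Middle>(middle.back())) rather than through the unigram table, which only remains correct for a bigram-only model; it also adds the previously missing ReadEnd(f) call after the try/catch. The stand-alone C++ sketch below is not KenLM code: the activate_* helpers, longest_activation, and the example counts are hypothetical stand-ins used only to illustrate the corrected branch selection on counts.size().

// sketch_branch.cc -- hypothetical illustration, not part of KenLM.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Stand-ins for the two activation paths passed to ReadNGrams in the diff above.
static std::string activate_from_last_middle() { return "ActivateLowerMiddle<Middle>(middle.back())"; }
static std::string activate_from_unigram()     { return "ActivateUnigram(unigram.Raw())"; }

// counts[i] holds the number of (i+1)-grams, so counts.size() is the model order.
static std::string longest_activation(const std::vector<uint64_t> &counts) {
  if (counts.size() > 2) {
    // Order >= 3: the longest n-grams extend entries in the highest middle
    // order, so they must be activated there (the corrected branch).
    return activate_from_last_middle();
  } else {
    // Order 2: there are no middle orders, so activation falls back to unigrams.
    return activate_from_unigram();
  }
}

int main() {
  const std::vector<uint64_t> trigram_counts = {1000, 5000, 8000};  // hypothetical counts
  const std::vector<uint64_t> bigram_counts  = {1000, 5000};
  std::cout << "order 3 -> " << longest_activation(trigram_counts) << "\n";
  std::cout << "order 2 -> " << longest_activation(bigram_counts) << "\n";
  return 0;
}

Separately, the exception message quoted in the diff points users at build_binary's -p option (probing_multiplier) as the workaround when pruned n-grams leave too little blank space in the probing hash table.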