Diffstat (limited to 'klm/search/rule.cc')
-rw-r--r--  klm/search/rule.cc  52
1 file changed, 26 insertions, 26 deletions
diff --git a/klm/search/rule.cc b/klm/search/rule.cc
index 5b00207e..0244a09f 100644
--- a/klm/search/rule.cc
+++ b/klm/search/rule.cc
@@ -1,7 +1,7 @@
#include "search/rule.hh"
+#include "lm/model.hh"
#include "search/context.hh"
-#include "search/final.hh"
#include <ostream>
@@ -9,35 +9,35 @@
namespace search {
-template <class Model> float ScoreRule(const Context<Model> &context, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing) {
- unsigned int oov_count = 0;
- float prob = 0.0;
- const Model &model = context.LanguageModel();
- const lm::WordIndex oov = model.GetVocabulary().NotFound();
- for (std::vector<lm::WordIndex>::const_iterator word = words.begin(); ; ++word) {
- lm::ngram::RuleScore<Model> scorer(model, *(writing++));
- // TODO: optimize
- if (prepend_bos && (word == words.begin())) {
- scorer.BeginSentence();
- }
- for (; ; ++word) {
- if (word == words.end()) {
- prob += scorer.Finish();
- return static_cast<float>(oov_count) * context.GetWeights().OOV() + prob * context.GetWeights().LM();
- }
- if (*word == kNonTerminal) break;
- if (*word == oov) ++oov_count;
+template <class Model> ScoreRuleRet ScoreRule(const Model &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing) {
+ ScoreRuleRet ret;
+ ret.prob = 0.0;
+ ret.oov = 0;
+ const lm::WordIndex oov = model.GetVocabulary().NotFound(), bos = model.GetVocabulary().BeginSentence();
+ lm::ngram::RuleScore<Model> scorer(model, *(writing++));
+ std::vector<lm::WordIndex>::const_iterator word = words.begin();
+ if (word != words.end() && *word == bos) {
+ scorer.BeginSentence();
+ ++word;
+ }
+ for (; word != words.end(); ++word) {
+ if (*word == kNonTerminal) {
+ ret.prob += scorer.Finish();
+ scorer.Reset(*(writing++));
+ } else {
+ if (*word == oov) ++ret.oov;
scorer.Terminal(*word);
}
- prob += scorer.Finish();
}
+ ret.prob += scorer.Finish();
+ return ret;
}
-template float ScoreRule(const Context<lm::ngram::RestProbingModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
-template float ScoreRule(const Context<lm::ngram::ProbingModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
-template float ScoreRule(const Context<lm::ngram::TrieModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
-template float ScoreRule(const Context<lm::ngram::QuantTrieModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
-template float ScoreRule(const Context<lm::ngram::ArrayTrieModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
-template float ScoreRule(const Context<lm::ngram::QuantArrayTrieModel> &model, const std::vector<lm::WordIndex> &words, bool prepend_bos, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::RestProbingModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::ProbingModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::TrieModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::QuantTrieModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::ArrayTrieModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
+template ScoreRuleRet ScoreRule(const lm::ngram::QuantArrayTrieModel &model, const std::vector<lm::WordIndex> &words, lm::ngram::ChartState *writing);
} // namespace search
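
For illustration, a minimal sketch of how the revised ScoreRule interface might be called, assuming the declarations in search/rule.hh match the definitions in this diff (ScoreRuleRet with prob and oov members, kNonTerminal, and the new three-argument signature). The model file name "example.binlm" and the example rule words are hypothetical, and the surrounding setup is not part of this file.

#include "lm/model.hh"
#include "search/rule.hh"

#include <iostream>
#include <vector>

int main() {
  // Hypothetical model file; any binary accepted by lm::ngram::ProbingModel works here.
  lm::ngram::ProbingModel model("example.binlm");
  const lm::ngram::ProbingVocabulary &vocab = model.GetVocabulary();

  // Rule "<s> the [X] cat" as word indices.  With this change, beginning-of-sentence
  // is signalled by placing BeginSentence() at the front of the vector instead of
  // passing the old prepend_bos flag.
  std::vector<lm::WordIndex> words;
  words.push_back(vocab.BeginSentence());
  words.push_back(vocab.Index("the"));
  words.push_back(search::kNonTerminal);
  words.push_back(vocab.Index("cat"));

  // ScoreRule writes one ChartState per terminal span: one up front plus one for
  // every kNonTerminal marker, so reserve 1 + (number of non-terminals) states.
  std::vector<lm::ngram::ChartState> states(2);

  // The return value now carries the raw language model score and the OOV count
  // separately; applying feature weights is left to the caller.
  search::ScoreRuleRet ret = search::ScoreRule(model, words, &states[0]);
  std::cout << "LM prob: " << ret.prob << " OOV count: " << ret.oov << std::endl;
  return 0;
}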