blob: 0267cd4eb9a7425a0275c61bd67516898d8a8af7 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
|
#include "lm/binary_format.hh"
#include "lm/model.hh"
#include "lm/left.hh"
#include "util/tokenize_piece.hh"

#include <iostream>
#include <string>
template <class Model> void Query(const char *name) {
Model model(name);
std::string line;
lm::ngram::ChartState ignored;
while (getline(std::cin, line)) {
lm::ngram::RuleScore<Model> scorer(model, ignored);
for (util::TokenIter<util::SingleCharacter, true> i(line, ' '); i; ++i) {
scorer.Terminal(model.GetVocabulary().Index(*i));
}
std::cout << scorer.Finish() << '\n';
}
}
// Entry point: load the language model named by argv[1] and score stdin, one
// sentence per line.  Exits 0 on success, 1 on a usage error or when the
// binary file holds a model type this tool does not handle.
int main(int argc, char *argv[]) {
  if (argc != 2) {
    std::cerr << "Expected model file name." << std::endl;
    return 1;
  }
  const char *name = argv[1];
  // Default to PROBING: RecognizeBinary only fills model_type for binary
  // files, so text (ARPA) input falls through to the probing model, which
  // can read it directly.
  lm::ngram::ModelType model_type = lm::ngram::PROBING;
  lm::ngram::RecognizeBinary(name, model_type);
  switch (model_type) {
    case lm::ngram::PROBING:
      Query<lm::ngram::ProbingModel>(name);
      break;
    case lm::ngram::REST_PROBING:
      Query<lm::ngram::RestProbingModel>(name);
      break;
    default:
      std::cerr << "Model type not supported yet." << std::endl;
      // BUG FIX: previously this path printed an error yet exited 0,
      // reporting success to the caller.
      return 1;
  }
  return 0;
}
|