author     Kenneth Heafield <github@kheafield.com>   2012-10-22 14:04:27 +0100
committer  Kenneth Heafield <github@kheafield.com>   2012-10-22 14:04:27 +0100
commit     1fb7bfbbe287e868522613871ed6ca74369ed2a1 (patch)
tree       6c06e30cdb32f1116f6cf5fdc7ac74b96a11013e /decoder
parent     ac586bc9b156b4ae687cd5961ba1fe7b20ec57d6 (diff)
Update search, make it compile
Diffstat (limited to 'decoder')
-rw-r--r--  decoder/Makefile.am                                      3
-rw-r--r--  decoder/decoder.cc                                       8
-rw-r--r--  decoder/incremental.cc (renamed from decoder/lazy.cc)   96
-rw-r--r--  decoder/incremental.h                                   11
-rw-r--r--  decoder/lazy.h                                          11
5 files changed, 68 insertions, 61 deletions
diff --git a/decoder/Makefile.am b/decoder/Makefile.am
index 5c0a1964..f8f427d3 100644
--- a/decoder/Makefile.am
+++ b/decoder/Makefile.am
@@ -17,7 +17,7 @@ trule_test_SOURCES = trule_test.cc
trule_test_LDADD = $(BOOST_UNIT_TEST_FRAMEWORK_LDFLAGS) $(BOOST_UNIT_TEST_FRAMEWORK_LIBS) libcdec.a ../mteval/libmteval.a ../utils/libutils.a -lz
cdec_SOURCES = cdec.cc
-cdec_LDADD = libcdec.a ../mteval/libmteval.a ../utils/libutils.a ../klm/lm/libklm.a ../klm/util/libklm_util.a -lz
+cdec_LDADD = libcdec.a ../mteval/libmteval.a ../utils/libutils.a ../klm/search/libksearch.a ../klm/lm/libklm.a ../klm/util/libklm_util.a -lz
AM_CPPFLAGS = -DBOOST_TEST_DYN_LINK -W -Wno-sign-compare $(GTEST_CPPFLAGS) -I.. -I../mteval -I../utils -I../klm
@@ -73,6 +73,7 @@ libcdec_a_SOURCES = \
ff_source_syntax.cc \
ff_bleu.cc \
ff_factory.cc \
+ incremental.cc \
lexalign.cc \
lextrans.cc \
tagger.cc \
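
The two hunks above make one build change each: cdec now links the new static archive ../klm/search/libksearch.a, and incremental.cc is compiled into libcdec.a. The placement of libksearch.a in cdec_LDADD matters with a single-pass static linker: an archive can only resolve symbols referenced by archives listed before it, so the search library has to come ahead of the KenLM archives (libklm.a, libklm_util.a) it presumably depends on.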
diff --git a/decoder/decoder.cc b/decoder/decoder.cc
index 052823ca..fe812011 100644
--- a/decoder/decoder.cc
+++ b/decoder/decoder.cc
@@ -39,7 +39,7 @@
#include "sampler.h"
#include "forest_writer.h" // TODO this section should probably be handled by an Observer
-#include "lazy.h"
+#include "incremental.h"
#include "hg_io.h"
#include "aligner.h"
@@ -412,7 +412,7 @@ DecoderImpl::DecoderImpl(po::variables_map& conf, int argc, char** argv, istream
("show_conditional_prob", "Output the conditional log prob to STDOUT instead of a translation")
("show_cfg_search_space", "Show the search space as a CFG")
("show_target_graph", po::value<string>(), "Directory to write the target hypergraphs to")
- ("lazy_search", po::value<string>(), "Run lazy search with this language model file")
+ ("incremental_search", po::value<string>(), "Run lazy search with this language model file")
("coarse_to_fine_beam_prune", po::value<double>(), "Prune paths from coarse parse forest before fine parse, keeping paths within exp(alpha>=0)")
("ctf_beam_widen", po::value<double>()->default_value(2.0), "Expand coarse pass beam by this factor if no fine parse is found")
("ctf_num_widenings", po::value<int>()->default_value(2), "Widen coarse beam this many times before backing off to full parse")
@@ -828,8 +828,8 @@ bool DecoderImpl::Decode(const string& input, DecoderObserver* o) {
if (conf.count("show_target_graph"))
HypergraphIO::WriteTarget(conf["show_target_graph"].as<string>(), sent_id, forest);
- if (conf.count("lazy_search")) {
- PassToLazy(conf["lazy_search"].as<string>().c_str(), CurrentWeightVector(), pop_limit, forest);
+ if (conf.count("incremental_search")) {
+ PassToIncremental(conf["incremental_search"].as<string>().c_str(), CurrentWeightVector(), pop_limit, forest);
o->NotifyDecodingComplete(smeta);
return true;
}
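
On the command line the rename is mechanical: an invocation that previously passed --lazy_search would now look something like this (hypothetical file names; -c is cdec's config flag):

    ./cdec -c cdec.ini --incremental_search lm.klm

The model argument is anything KenLM can open; as the Load function below shows, RecognizeBinary falls back to PROBING for non-binary (ARPA) input. The pop_limit forwarded to PassToIncremental is set elsewhere in decoder.cc (it appears to be the cube pruning pop limit), not by this option.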
diff --git a/decoder/lazy.cc b/decoder/incremental.cc
index 1e6a94fe..768bbd65 100644
--- a/decoder/lazy.cc
+++ b/decoder/incremental.cc
@@ -1,5 +1,6 @@
+#include "incremental.h"
+
#include "hg.h"
-#include "lazy.h"
#include "fdict.h"
#include "tdict.h"
@@ -8,7 +9,8 @@
#include "search/config.hh"
#include "search/context.hh"
#include "search/edge.hh"
-#include "search/edge_queue.hh"
+#include "search/edge_generator.hh"
+#include "search/rule.hh"
#include "search/vertex.hh"
#include "search/vertex_generator.hh"
#include "util/exception.hh"
@@ -41,19 +43,19 @@ struct MapVocab : public lm::EnumerateVocab {
std::vector<lm::WordIndex> out_;
};
-class LazyBase {
+class IncrementalBase {
public:
- LazyBase(const std::vector<weight_t> &weights) :
+ IncrementalBase(const std::vector<weight_t> &weights) :
cdec_weights_(weights),
weights_(weights[FD::Convert("KLanguageModel")], weights[FD::Convert("KLanguageModel_OOV")], weights[FD::Convert("WordPenalty")]) {
std::cerr << "Weights KLanguageModel " << weights_.LM() << " KLanguageModel_OOV " << weights_.OOV() << " WordPenalty " << weights_.WordPenalty() << std::endl;
}
- virtual ~LazyBase() {}
+ virtual ~IncrementalBase() {}
virtual void Search(unsigned int pop_limit, const Hypergraph &hg) const = 0;
- static LazyBase *Load(const char *model_file, const std::vector<weight_t> &weights);
+ static IncrementalBase *Load(const char *model_file, const std::vector<weight_t> &weights);
protected:
lm::ngram::Config GetConfig() {
@@ -69,110 +71,114 @@ class LazyBase {
const search::Weights weights_;
};
-template <class Model> class Lazy : public LazyBase {
+template <class Model> class Incremental : public IncrementalBase {
public:
- Lazy(const char *model_file, const std::vector<weight_t> &weights) : LazyBase(weights), m_(model_file, GetConfig()) {}
+ Incremental(const char *model_file, const std::vector<weight_t> &weights) : IncrementalBase(weights), m_(model_file, GetConfig()) {}
void Search(unsigned int pop_limit, const Hypergraph &hg) const;
private:
- unsigned char ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::PartialEdge &out) const;
+ void ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::EdgeGenerator &gen) const;
const Model m_;
};
-LazyBase *LazyBase::Load(const char *model_file, const std::vector<weight_t> &weights) {
+IncrementalBase *IncrementalBase::Load(const char *model_file, const std::vector<weight_t> &weights) {
lm::ngram::ModelType model_type;
if (!lm::ngram::RecognizeBinary(model_file, model_type)) model_type = lm::ngram::PROBING;
switch (model_type) {
case lm::ngram::PROBING:
- return new Lazy<lm::ngram::ProbingModel>(model_file, weights);
+ return new Incremental<lm::ngram::ProbingModel>(model_file, weights);
case lm::ngram::REST_PROBING:
- return new Lazy<lm::ngram::RestProbingModel>(model_file, weights);
+ return new Incremental<lm::ngram::RestProbingModel>(model_file, weights);
default:
UTIL_THROW(util::Exception, "Sorry this lm type isn't supported yet.");
}
}
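
Only the two probing variants are wired up; any other type RecognizeBinary reports falls through to the exception. Supporting KenLM's trie models would presumably be one more case per type, along these lines (a sketch, untested; lm::ngram::TrieModel is KenLM's plain trie typedef):

    case lm::ngram::TRIE:
      return new Incremental<lm::ngram::TrieModel>(model_file, weights);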
-void PrintFinal(const Hypergraph &hg, const search::Final &final) {
+void PrintFinal(const Hypergraph &hg, const search::Final final) {
const std::vector<WordID> &words = static_cast<const Hypergraph::Edge*>(final.GetNote().vp)->rule_->e();
- boost::array<const search::Final*, search::kMaxArity>::const_iterator child(final.Children().begin());
+ const search::Final *child(final.Children());
for (std::vector<WordID>::const_iterator i = words.begin(); i != words.end(); ++i) {
if (*i > 0) {
std::cout << TD::Convert(*i) << ' ';
} else {
- PrintFinal(hg, **child++);
+ PrintFinal(hg, *child++);
}
}
}
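
PrintFinal leans on cdec's rule encoding: positive entries of e() are target-side terminal word IDs, while non-positive entries mark nonterminal slots whose index, judging by in.tail_nodes_[-*word] in ConvertEdge below, is the negated entry. A self-contained illustration of that convention (the IDs and the rule are invented for the example):

    #include <iostream>

    int main() {
      // "[X] -> a [X,1] b [X,2]" encoded as { id("a"), 0, id("b"), -1 }.
      const int e[] = {7, 0, 9, -1};  // 7 = id("a"), 9 = id("b")
      const char *children[] = {"<child 0>", "<child 1>"};
      for (int i = 0; i < 4; ++i) {
        if (e[i] > 0) std::cout << "terminal " << e[i] << ' ';
        else std::cout << children[-e[i]] << ' ';  // PrintFinal recurses here
      }
      std::cout << std::endl;
      return 0;
    }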
-template <class Model> void Lazy<Model>::Search(unsigned int pop_limit, const Hypergraph &hg) const {
+template <class Model> void Incremental<Model>::Search(unsigned int pop_limit, const Hypergraph &hg) const {
boost::scoped_array<search::Vertex> out_vertices(new search::Vertex[hg.nodes_.size()]);
search::Config config(weights_, pop_limit);
search::Context<Model> context(config, m_);
for (unsigned int i = 0; i < hg.nodes_.size() - 1; ++i) {
- search::EdgeQueue queue(context.PopLimit());
+ search::EdgeGenerator gen;
const Hypergraph::EdgesVector &down_edges = hg.nodes_[i].in_edges_;
for (unsigned int j = 0; j < down_edges.size(); ++j) {
unsigned int edge_index = down_edges[j];
- unsigned char arity = ConvertEdge(context, i == hg.nodes_.size() - 2, out_vertices.get(), hg.edges_[edge_index], queue.InitializeEdge());
- search::Note note;
- note.vp = &hg.edges_[edge_index];
- if (arity != 255) queue.AddEdge(arity, note);
+ ConvertEdge(context, i == hg.nodes_.size() - 2, out_vertices.get(), hg.edges_[edge_index], gen);
}
search::VertexGenerator vertex_gen(context, out_vertices[i]);
- queue.Search(context, vertex_gen);
+ gen.Search(context, vertex_gen);
}
- const search::Final *top = out_vertices[hg.nodes_.size() - 2].BestChild();
- if (!top) {
+ const search::Final top = out_vertices[hg.nodes_.size() - 2].BestChild();
+ if (!top.Valid()) {
std::cout << "NO PATH FOUND" << std::endl;
} else {
- PrintFinal(hg, *top);
- std::cout << "||| " << top->Bound() << std::endl;
+ PrintFinal(hg, top);
+ std::cout << "||| " << top.GetScore() << std::endl;
}
}
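
Search assumes cdec's hypergraph nodes come in bottom-up order with the goal node last (my reading of the indices): the loop stops before the goal node, the last node actually searched is nodes_.size() - 2, and the i == hg.nodes_.size() - 2 test is what makes ConvertEdge append EndSentence on top-level edges. Condensed, the per-node flow added here is:

    // Sketch of one iteration (names from the diff; a summary, not drop-in code):
    search::EdgeGenerator gen;                        // one generator per node
    // ... ConvertEdge(...) for each incoming edge; it may silently drop an edge ...
    search::VertexGenerator vertex_gen(context, out_vertices[i]);
    gen.Search(context, vertex_gen);                  // pop-limited best-first search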
-template <class Model> unsigned char Lazy<Model>::ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::PartialEdge &out) const {
+template <class Model> void Incremental<Model>::ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::EdgeGenerator &gen) const {
const std::vector<WordID> &e = in.rule_->e();
std::vector<lm::WordIndex> words;
+ words.reserve(e.size());
+ std::vector<search::PartialVertex> nts;
unsigned int terminals = 0;
- unsigned char nt = 0;
- out.score = 0.0;
+ float score = 0.0;
for (std::vector<WordID>::const_iterator word = e.begin(); word != e.end(); ++word) {
if (*word <= 0) {
- out.nt[nt] = vertices[in.tail_nodes_[-*word]].RootPartial();
- if (out.nt[nt].Empty()) return 255;
- out.score += out.nt[nt].Bound();
- ++nt;
+ nts.push_back(vertices[in.tail_nodes_[-*word]].RootPartial());
+ if (nts.back().Empty()) return;
+ score += nts.back().Bound();
words.push_back(lm::kMaxWordIndex);
} else {
++terminals;
words.push_back(vocab_.FromCDec(*word));
}
}
- for (unsigned char fill = nt; fill < search::kMaxArity; ++fill) {
- out.nt[fill] = search::kBlankPartialVertex;
- }
if (final) {
words.push_back(m_.GetVocabulary().EndSentence());
}
- out.score += in.rule_->GetFeatureValues().dot(cdec_weights_);
- out.score -= static_cast<float>(terminals) * context.GetWeights().WordPenalty() / M_LN10;
- out.score += search::ScoreRule(context, words, final, out.between);
- return nt;
+ search::PartialEdge out(gen.AllocateEdge(nts.size()));
+
+ if (!nts.empty()) memcpy(out.NT(), &nts[0], sizeof(search::PartialVertex) * nts.size());
+
+ search::Note note;
+ note.vp = &in;
+ out.SetNote(note);
+
+ score += in.rule_->GetFeatureValues().dot(cdec_weights_);
+ score -= static_cast<float>(terminals) * context.GetWeights().WordPenalty() / M_LN10;
+ score += search::ScoreRule(context, words, final, out.Between());
+ out.SetScore(score);
+
+ gen.AddEdge(out);
}
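
The pruning contract also changes shape: the old interface returned the edge's arity, with 255 as an in-band "drop this edge" signal that the caller in Search had to test before AddEdge; now ConvertEdge registers the edge itself, so dropping an edge whose child vertex is empty is a plain early return. Condensed from the two versions above:

    // Before: the caller interprets a magic return value.
    //   unsigned char arity = ConvertEdge(..., queue.InitializeEdge());
    //   if (arity != 255) queue.AddEdge(arity, note);
    // After: ConvertEdge either reaches gen.AddEdge(out) or bails out early.
    //   if (nts.back().Empty()) return;

A related simplification: the fixed padding to search::kMaxArity with kBlankPartialVertex disappears, since AllocateEdge(nts.size()) sizes each edge to its actual arity.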
-boost::scoped_ptr<LazyBase> AwfulGlobalLazy;
+boost::scoped_ptr<IncrementalBase> AwfulGlobalIncremental;
} // namespace
-void PassToLazy(const char *model_file, const std::vector<weight_t> &weights, unsigned int pop_limit, const Hypergraph &hg) {
- if (!AwfulGlobalLazy.get()) {
+void PassToIncremental(const char *model_file, const std::vector<weight_t> &weights, unsigned int pop_limit, const Hypergraph &hg) {
+ if (!AwfulGlobalIncremental.get()) {
std::cerr << "Pop limit " << pop_limit << std::endl;
- AwfulGlobalLazy.reset(LazyBase::Load(model_file, weights));
+ AwfulGlobalIncremental.reset(IncrementalBase::Load(model_file, weights));
}
- AwfulGlobalLazy->Search(pop_limit, hg);
+ AwfulGlobalIncremental->Search(pop_limit, hg);
}
diff --git a/decoder/incremental.h b/decoder/incremental.h
new file mode 100644
index 00000000..180383ce
--- /dev/null
+++ b/decoder/incremental.h
@@ -0,0 +1,11 @@
+#ifndef _INCREMENTAL_H_
+#define _INCREMENTAL_H_
+
+#include "weights.h"
+#include <vector>
+
+class Hypergraph;
+
+void PassToIncremental(const char *model_file, const std::vector<weight_t> &weights, unsigned int pop_limit, const Hypergraph &hg);
+
+#endif // _INCREMENTAL_H_
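
The header exposes a single entry point, and the file-scope scoped_ptr in incremental.cc means the model is loaded on the first call and reused for every later sentence (a different model_file on a later call is silently ignored). A hypothetical caller, sketched against just this header (the file name and pop limit are invented; the weight vector must carry the KLanguageModel, KLanguageModel_OOV, and WordPenalty features the IncrementalBase constructor reads):

    #include "incremental.h"
    #include "hg.h"    // Hypergraph is only forward-declared by incremental.h
    #include <vector>

    void DecodeOne(const Hypergraph &forest, const std::vector<weight_t> &weights) {
      const unsigned int pop_limit = 200;  // assumed: a typical cube-pruning-style limit
      PassToIncremental("lm.klm", weights, pop_limit, forest);
    }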
diff --git a/decoder/lazy.h b/decoder/lazy.h
deleted file mode 100644
index 94895b19..00000000
--- a/decoder/lazy.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#ifndef _LAZY_H_
-#define _LAZY_H_
-
-#include "weights.h"
-#include <vector>
-
-class Hypergraph;
-
-void PassToLazy(const char *model_file, const std::vector<weight_t> &weights, unsigned int pop_limit, const Hypergraph &hg);
-
-#endif // _LAZY_H_