Diffstat (limited to 'decoder')
-rw-r--r--   decoder/Jamfile       2
-rw-r--r--   decoder/decoder.cc    4
-rw-r--r--   decoder/lazy.cc      78
-rw-r--r--   decoder/lazy.h        5
4 files changed, 67 insertions, 22 deletions
diff --git a/decoder/Jamfile b/decoder/Jamfile
index da02d063..d778dc7f 100644
--- a/decoder/Jamfile
+++ b/decoder/Jamfile
@@ -58,10 +58,12 @@ lib decoder :
   rescore_translator.cc
   hg_remove_eps.cc
   hg_union.cc
+  lazy.cc
   $(glc)
   ..//utils
   ..//mteval
   ../klm/lm//kenlm
+  ../klm/search//search
   ..//boost_program_options
   : <include>.
   : :
diff --git a/decoder/decoder.cc b/decoder/decoder.cc
index a69a6d05..3a410cf2 100644
--- a/decoder/decoder.cc
+++ b/decoder/decoder.cc
@@ -38,6 +38,7 @@
 #include "sampler.h"
 
 #include "forest_writer.h" // TODO this section should probably be handled by an Observer
+#include "lazy.h"
 #include "hg_io.h"
 #include "aligner.h"
@@ -832,6 +833,9 @@ bool DecoderImpl::Decode(const string& input, DecoderObserver* o) {
   if (conf.count("show_target_graph"))
     HypergraphIO::WriteTarget(conf["show_target_graph"].as<string>(), sent_id, forest);
 
+  if (conf.count("lazy_search"))
+    PassToLazy(forest, CurrentWeightVector());
+
   for (int pass = 0; pass < rescoring_passes.size(); ++pass) {
     const RescoringPass& rp = rescoring_passes[pass];
     const vector<weight_t>& cur_weights = *rp.weight_vector;
diff --git a/decoder/lazy.cc b/decoder/lazy.cc
index f5b61c75..4776c1b8 100644
--- a/decoder/lazy.cc
+++ b/decoder/lazy.cc
@@ -1,15 +1,23 @@
 #include "hg.h"
 #include "lazy.h"
+#include "fdict.h"
 #include "tdict.h"
 
 #include "lm/enumerate_vocab.hh"
 #include "lm/model.hh"
+#include "search/config.hh"
+#include "search/context.hh"
 #include "search/edge.hh"
 #include "search/vertex.hh"
+#include "search/vertex_generator.hh"
 #include "util/exception.hh"
 
+#include <boost/scoped_ptr.hpp>
 #include <boost/scoped_array.hpp>
 
+#include <iostream>
+#include <vector>
+
 namespace {
 
 struct MapVocab : public lm::EnumerateVocab {
@@ -19,13 +27,13 @@ struct MapVocab : public lm::EnumerateVocab {
     // Do not call after Lookup.
     void Add(lm::WordIndex index, const StringPiece &str) {
       const WordID cdec_id = TD::Convert(str.as_string());
-      if (cdec_id >= out_->size()) out_.resize(cdec_id + 1);
+      if (cdec_id >= out_.size()) out_.resize(cdec_id + 1);
       out_[cdec_id] = index;
     }
 
     // Assumes Add has been called and will never be called again.
     lm::WordIndex FromCDec(WordID id) const {
-      return out_[out.size() > id ? id : 0];
+      return out_[out_.size() > id ? id : 0];
     }
 
   private:
@@ -34,44 +42,50 @@ struct MapVocab : public lm::EnumerateVocab {
 
 class LazyBase {
   public:
-    LazyBase() {}
+    LazyBase(const std::vector<weight_t> &weights) :
+      cdec_weights_(weights),
+      config_(search::Weights(weights[FD::Convert("KLanguageModel")], weights[FD::Convert("KLanguageModel_OOV")], weights[FD::Convert("WordPenalty")]), 1000) {}
 
     virtual ~LazyBase() {}
 
     virtual void Search(const Hypergraph &hg) const = 0;
 
-    static LazyBase *Load(const char *model_file);
+    static LazyBase *Load(const char *model_file, const std::vector<weight_t> &weights);
 
   protected:
-    lm::ngram::Config GetConfig() const {
+    lm::ngram::Config GetConfig() {
       lm::ngram::Config ret;
       ret.enumerate_vocab = &vocab_;
       return ret;
     }
 
     MapVocab vocab_;
+
+    const std::vector<weight_t> &cdec_weights_;
+
+    const search::Config config_;
 };
 
 template <class Model> class Lazy : public LazyBase {
   public:
-    explicit Lazy(const char *model_file) : m_(model_file, GetConfig()) {}
+    Lazy(const char *model_file, const std::vector<weight_t> &weights) : LazyBase(weights), m_(model_file, GetConfig()) {}
 
     void Search(const Hypergraph &hg) const;
 
   private:
-    void ConvertEdge(const Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::Edge &out) const;
+    void ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::Edge &out) const;
 
     const Model m_;
 };
 
-static LazyBase *LazyBase::Load(const char *model_file) {
+LazyBase *LazyBase::Load(const char *model_file, const std::vector<weight_t> &weights) {
   lm::ngram::ModelType model_type;
-  if (!lm::ngram::RecognizeBinary(lm_name, model_type)) model_type = lm::ngram::PROBING;
+  if (!lm::ngram::RecognizeBinary(model_file, model_type)) model_type = lm::ngram::PROBING;
   switch (model_type) {
     case lm::ngram::PROBING:
-      return new Lazy<lm::ngram::ProbingModel>(model_file);
+      return new Lazy<lm::ngram::ProbingModel>(model_file, weights);
     case lm::ngram::REST_PROBING:
-      return new Lazy<lm::ngram::RestProbingModel>(model_file);
+      return new Lazy<lm::ngram::RestProbingModel>(model_file, weights);
     default:
       UTIL_THROW(util::Exception, "Sorry this lm type isn't supported yet.");
   }
@@ -80,25 +94,41 @@ static LazyBase *LazyBase::Load(const char *model_file) {
 template <class Model> void Lazy<Model>::Search(const Hypergraph &hg) const {
   boost::scoped_array<search::Vertex> out_vertices(new search::Vertex[hg.nodes_.size()]);
   boost::scoped_array<search::Edge> out_edges(new search::Edge[hg.edges_.size()]);
+
+  search::Context<Model> context(config_, m_);
+
   for (unsigned int i = 0; i < hg.nodes_.size(); ++i) {
-    search::Vertex *out_vertex = out_vertices[i];
+    search::Vertex &out_vertex = out_vertices[i];
     const Hypergraph::EdgesVector &down_edges = hg.nodes_[i].in_edges_;
-    for (unsigned int j = 0; j < edges.size(); ++j) {
+    for (unsigned int j = 0; j < down_edges.size(); ++j) {
       unsigned int edge_index = down_edges[j];
-      const Hypergraph::Edge &in_edge = hg.edges_[edge_index];
-      search::Edge &out_edge = out_edges[edge_index];
+      ConvertEdge(context, i == hg.nodes_.size() - 1, out_vertices.get(), hg.edges_[edge_index], out_edges[edge_index]);
+      out_vertex.Add(out_edges[edge_index]);
     }
+    out_vertex.FinishedAdding();
+    search::VertexGenerator(context, out_vertex);
+  }
+  search::PartialVertex top = out_vertices[hg.nodes_.size() - 1].RootPartial();
+  if (top.Empty()) {
+    std::cout << "NO PATH FOUND";
+  } else {
+    search::PartialVertex continuation;
+    while (!top.Complete()) {
+      top.Split(continuation);
+      top = continuation;
+    }
+    std::cout << top.End().Bound() << std::endl;
   }
 }
 
 // TODO: get weights into here somehow.
-template <class Model> void Lazy<Model>::ConvertEdge(const Context<Model> &context, bool final, search::Vertices *vertices, const Hypergraph::Edge &in, search::Edge &out) const {
-  const std::vector<WordID> &e = in_edge.rule_->e();
+template <class Model> void Lazy<Model>::ConvertEdge(const search::Context<Model> &context, bool final, search::Vertex *vertices, const Hypergraph::Edge &in, search::Edge &out) const {
+  const std::vector<WordID> &e = in.rule_->e();
   std::vector<lm::WordIndex> words;
   unsigned int terminals = 0;
   for (std::vector<WordID>::const_iterator word = e.begin(); word != e.end(); ++word) {
     if (*word <= 0) {
-      out.Add(vertices[edge.tail_nodes_[-*word]]);
+      out.Add(vertices[in.tail_nodes_[-*word]]);
       words.push_back(lm::kMaxWordIndex);
     } else {
       ++terminals;
@@ -110,13 +140,19 @@ template <class Model> void Lazy<Model>::ConvertEdge(const Context<Model> &conte
     words.push_back(m_.GetVocabulary().EndSentence());
   }
 
-  float additive = edge.rule_->GetFeatureValues().dot(weight_vector);
+  float additive = in.rule_->GetFeatureValues().dot(cdec_weights_);
+  additive -= terminals * context.GetWeights().WordPenalty() * static_cast<float>(terminals) / M_LN10;
+
   out.InitRule().Init(context, additive, words, final);
 }
 
-} // namespace
+boost::scoped_ptr<LazyBase> AwfulGlobalLazy;
 
-void PassToLazy(const Hypergraph &hg) {
+} // namespace
+
+void PassToLazy(const Hypergraph &hg, const std::vector<weight_t> &weights) {
+  if (!AwfulGlobalLazy.get()) {
+    AwfulGlobalLazy.reset(LazyBase::Load("lm", weights));
+  }
+  AwfulGlobalLazy->Search(hg);
 }
diff --git a/decoder/lazy.h b/decoder/lazy.h
index aecd030d..3e71a3b0 100644
--- a/decoder/lazy.h
+++ b/decoder/lazy.h
@@ -1,8 +1,11 @@
 #ifndef _LAZY_H_
 #define _LAZY_H_
 
+#include "weights.h"
+#include <vector>
+
 class Hypergraph;
 
-void PassToLazy(const Hypergraph &hg);
+void PassToLazy(const Hypergraph &hg, const std::vector<weight_t> &weights);
 
 #endif // _LAZY_H_
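For reference, the sketch below is not part of the commit; it only illustrates how the new PassToLazy() entry point could be driven directly. The feature names KLanguageModel, KLanguageModel_OOV and WordPenalty are the ones LazyBase's constructor looks up in lazy.cc above; the function name RunLazySearchOnForest and the concrete weight values are illustrative assumptions. In decoder.cc the weights actually come from CurrentWeightVector(), guarded by the lazy_search option.

// Illustrative sketch only -- not part of this commit.
#include "fdict.h"
#include "hg.h"
#include "lazy.h"
#include "weights.h"

#include <algorithm>
#include <vector>

void RunLazySearchOnForest(const Hypergraph &forest) {
  // LazyBase's constructor indexes the dense weight vector with these three
  // feature ids (see lazy.cc above), so the vector must be large enough.
  const int lm = FD::Convert("KLanguageModel");
  const int oov = FD::Convert("KLanguageModel_OOV");
  const int wp = FD::Convert("WordPenalty");

  std::vector<weight_t> weights(std::max(lm, std::max(oov, wp)) + 1, 0.0);
  weights[lm] = 1.0;     // illustrative values, not tuned
  weights[oov] = -10.0;
  weights[wp] = -1.0;

  // Loads the KenLM model from the hard-coded path "lm" on first use
  // (AwfulGlobalLazy in lazy.cc), runs the incremental search over the
  // forest, and prints the resulting bound or "NO PATH FOUND" to stdout.
  PassToLazy(forest, weights);
}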
