path: root/dtrain/kbestget.h
blob: cf466fe4d611ea309ceb8bdd5b31248f3057f03d
#ifndef _DTRAIN_KBESTGET_H_
#define _DTRAIN_KBESTGET_H_


#include "kbest.h"


namespace dtrain
{


/*
 * KBestList
 *
 * Holds the k-best translations of one sentence as index-aligned vectors:
 * target yields, feature vectors and model scores are filled by KBestGetter;
 * the metric scores are left for the caller to fill in.
 */
struct KBestList {
  vector<SparseVector<double> > feats;
  vector<vector<WordID> > sents;
  vector<double> model_scores;
  vector<double> scores;
  size_t GetSize() const { return sents.size(); }
};


/*
 * KBestGetter
 *
 * DecoderObserver that pulls the k best derivations out of the translation
 * forest after each sentence is decoded. Filter type "unique" drops
 * duplicate target yields, "no" keeps every derivation.
 */
struct KBestGetter : public DecoderObserver
{
  const size_t k_;
  const string filter_type_;
  KBestList kb;

  KBestGetter( const size_t k, const string& filter_type ) :
    k_(k), filter_type_(filter_type) {}

  virtual void
  NotifyTranslationForest( const SentenceMetadata& smeta, Hypergraph* hg )
  {
    KBest( *hg );
  }

  KBestList* GetKBest() { return &kb; }

  void
  KBest( const Hypergraph& forest )
  {
    if ( filter_type_ == "unique" ) {
      KBestUnique( forest );
    } else if ( filter_type_ == "no" ) {
      KBestNoFilter( forest );
    } // any other filter type leaves kb unchanged
  }

  // Extract up to k_ derivations, keeping only unique target yields.
  void
  KBestUnique( const Hypergraph& forest )
  {
    kb.sents.clear();
    kb.feats.clear();
    kb.model_scores.clear();
    kb.scores.clear();
    typedef KBest::KBestDerivations<vector<WordID>, ESentenceTraversal,
                                    KBest::FilterUnique, prob_t, EdgeProb> K;
    K kbest( forest, k_ );
    for ( size_t i = 0; i < k_; ++i ) {
      const K::Derivation* d = kbest.LazyKthBest( forest.nodes_.size() - 1, i );
      if ( !d ) break;
      kb.sents.push_back( d->yield );
      kb.feats.push_back( d->feature_values );
      kb.model_scores.push_back( log(d->score) );
    }
  }

  // Extract up to k_ derivations without filtering duplicate yields.
  void
  KBestNoFilter( const Hypergraph& forest )
  {
    kb.sents.clear();
    kb.feats.clear();
    kb.model_scores.clear();
    kb.scores.clear();
    typedef KBest::KBestDerivations<vector<WordID>, ESentenceTraversal> K;
    K kbest( forest, k_ );
    for ( size_t i = 0; i < k_; ++i ) {
      const K::Derivation* d = kbest.LazyKthBest( forest.nodes_.size() - 1, i );
      if ( !d ) break;
      kb.sents.push_back( d->yield );
      kb.feats.push_back( d->feature_values );
      kb.model_scores.push_back( log(d->score) );
    }
  }
};
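/*
 * Usage sketch (illustrative only): a driver loop could hook the observer
 * into cdec's decoder roughly like this, assuming the usual
 * Decoder::Decode( input, observer ) interface; `decoder` and `src` are
 * placeholder names.
 *
 *   KBestGetter observer( k, "unique" );
 *   decoder.Decode( src, &observer );        // fills observer.kb
 *   KBestList* kb = observer.GetKBest();
 *   for ( size_t i = 0; i < kb->GetSize(); i++ ) {
 *     // kb->sents[i]         i-th target yield (vector<WordID>)
 *     // kb->feats[i]         its feature vector
 *     // kb->model_scores[i]  its log model score
 *     // kb->scores[i]        metric score, to be filled by the caller
 *   }
 */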


} // namespace


#endif