summaryrefslogtreecommitdiff
path: root/decoder
diff options
context:
space:
mode:
authorChris Dyer <cdyer@cs.cmu.edu>2011-11-11 17:12:39 -0500
committerChris Dyer <cdyer@cs.cmu.edu>2011-11-11 17:12:39 -0500
commit105a52a8d37497fe69a01a7de771ef9b9300cd71 (patch)
treef20c4cc8bf31ccf1ce7204301bfa169c6fa080a7 /decoder
parentb4fd470d2cb80b0c88d4210f7e5bb10d2aa4531d (diff)
optionally sample from forest to get training instances, rather than k-best it
Diffstat (limited to 'decoder')
-rw-r--r--decoder/Makefile.am1
-rw-r--r--decoder/hg_sampler.cc73
-rw-r--r--decoder/hg_sampler.h27
3 files changed, 101 insertions, 0 deletions
diff --git a/decoder/Makefile.am b/decoder/Makefile.am
index 6b9360d8..30eaf04d 100644
--- a/decoder/Makefile.am
+++ b/decoder/Makefile.am
@@ -51,6 +51,7 @@ libcdec_a_SOURCES = \
hg_io.cc \
decoder.cc \
hg_intersect.cc \
+ hg_sampler.cc \
factored_lexicon_helper.cc \
viterbi.cc \
lattice.cc \
diff --git a/decoder/hg_sampler.cc b/decoder/hg_sampler.cc
new file mode 100644
index 00000000..cdf0ec3c
--- /dev/null
+++ b/decoder/hg_sampler.cc
@@ -0,0 +1,73 @@
+#include "hg_sampler.h"
+
+#include <queue>
+
+#include "viterbi.h"
+#include "inside_outside.h"
+
+using namespace std;
+
+struct SampledDerivationWeightFunction { // restricts a Viterbi traversal to one already-sampled derivation
+ typedef double Weight; // weight type required by the Viterbi weight-function interface
+ explicit SampledDerivationWeightFunction(const vector<bool>& sampled) : sampled_edges(sampled) {} // sampled[i] == true iff edge i belongs to the sampled derivation
+ double operator()(const Hypergraph::Edge& e) const {
+ return static_cast<double>(sampled_edges[e.id_]); // 1.0 for sampled edges, 0.0 otherwise
+ }
+ const vector<bool>& sampled_edges; // non-owning reference; must outlive this functor
+};
+
+void HypergraphSampler::sample_hypotheses(const Hypergraph& hg,
+ unsigned n,
+ MT19937* rng,
+ vector<Hypothesis>* hypos) {
+ hypos->clear();
+ hypos->resize(n); // one Hypothesis per requested sample
+
+ // compute inside probabilities: total derivation weight below each node
+ vector<prob_t> node_probs;
+ Inside<prob_t, EdgeProb>(hg, &node_probs, EdgeProb());
+
+ vector<bool> sampled_edges(hg.edges_.size()); // marks edges of the current sampled derivation
+ queue<unsigned> q; // frontier of nodes still needing an incoming edge sampled
+ SampleSet<prob_t> ss; // reusable multinomial over a node's incoming edges
+ for (unsigned i = 0; i < n; ++i) {
+ fill(sampled_edges.begin(), sampled_edges.end(), false);
+ // sample one derivation top down, starting from the goal node
+ assert(q.empty());
+ Hypothesis& hyp = (*hypos)[i];
+ SparseVector<double>& deriv_features = hyp.fmap;
+ q.push(hg.nodes_.size() - 1); // goal node is the last node — TODO confirm against Hypergraph convention
+ prob_t& model_score = hyp.model_score;
+ model_score = prob_t::One(); // multiplicative identity; edge probs are folded in below
+ while(!q.empty()) {
+ unsigned cur_node_id = q.front();
+ q.pop();
+ const Hypergraph::Node& node = hg.nodes_[cur_node_id];
+ const unsigned num_in_edges = node.in_edges_.size();
+ unsigned sampled_edge_idx = 0;
+ if (num_in_edges == 1) {
+ sampled_edge_idx = node.in_edges_[0]; // only one choice: no sampling needed
+ } else {
+ assert(num_in_edges > 1); // nodes with zero incoming edges are not expected here
+ ss.clear();
+ for (unsigned j = 0; j < num_in_edges; ++j) {
+ const Hypergraph::Edge& edge = hg.edges_[node.in_edges_[j]];
+ prob_t p = edge.edge_prob_; // edge weight
+ for (unsigned k = 0; k < edge.tail_nodes_.size(); ++k)
+ p *= node_probs[edge.tail_nodes_[k]]; // times each tail node's inside weight
+ ss.add(p);
+ }
+ sampled_edge_idx = node.in_edges_[rng->SelectSample(ss)]; // draw edge with prob proportional to p
+ }
+ sampled_edges[sampled_edge_idx] = true;
+ const Hypergraph::Edge& sampled_edge = hg.edges_[sampled_edge_idx];
+ deriv_features += sampled_edge.feature_values_;
+ model_score *= sampled_edge.edge_prob_; // accumulate unnormalized derivation score
+ // recurse: each tail node of the chosen edge needs its own incoming edge sampled
+ for (unsigned j = 0; j < sampled_edge.tail_nodes_.size(); ++j) {
+ q.push(sampled_edge.tail_nodes_[j]);
+ }
+ }
+ Viterbi(hg, &hyp.words, ESentenceTraversal(), SampledDerivationWeightFunction(sampled_edges)); // extract the target yield of the sampled derivation
+ }
+}
diff --git a/decoder/hg_sampler.h b/decoder/hg_sampler.h
new file mode 100644
index 00000000..bf4e1eb0
--- /dev/null
+++ b/decoder/hg_sampler.h
@@ -0,0 +1,27 @@
+#ifndef _HG_SAMPLER_H_
+#define _HG_SAMPLER_H_
+
+
+#include <vector>
+#include "sparse_vector.h"
+#include "sampler.h"
+#include "wordid.h"
+
+class Hypergraph;
+
+struct HypergraphSampler { // draws random derivations (hypotheses) from a hypergraph
+
+ struct Hypothesis { // one sampled translation hypothesis
+ std::vector<WordID> words; // target-side yield of the sampled derivation
+ SparseVector<double> fmap; // accumulated feature vector of the derivation
+ prob_t model_score; // log unnormalized probability of the derivation
+ };
+
+ static void
+ sample_hypotheses(const Hypergraph& hg, // forest to sample from
+ unsigned n, // how many samples to draw
+ MT19937* rng, // random number source; must be non-null
+ std::vector<Hypothesis>* hypos); // out: cleared and filled with n samples
+};
+
+#endif