summaryrefslogtreecommitdiff
path: root/training/entropy.cc
diff options
context:
space:
mode:
authorChris Dyer <cdyer@allegro.clab.cs.cmu.edu>2012-11-18 13:35:42 -0500
committerChris Dyer <cdyer@allegro.clab.cs.cmu.edu>2012-11-18 13:35:42 -0500
commit8aa29810bb77611cc20b7a384897ff6703783ea1 (patch)
tree8635daa8fffb3f2cd90e30b41e27f4f9e0909447 /training/entropy.cc
parentfbdacabc85bea65d735f2cb7f92b98e08ce72d04 (diff)
major restructure of the training code
Diffstat (limited to 'training/entropy.cc')
-rw-r--r--training/entropy.cc41
1 file changed, 0 insertions, 41 deletions
diff --git a/training/entropy.cc b/training/entropy.cc
deleted file mode 100644
index 4fdbe2be..00000000
--- a/training/entropy.cc
+++ /dev/null
@@ -1,41 +0,0 @@
-#include "entropy.h"
-
-#include "prob.h"
-#include "candidate_set.h"
-
-using namespace std;
-
-namespace training {
-
-// see Mann and McCallum "Efficient Computation of Entropy Gradient ..." for
-// a mostly clear derivation of:
-// g = E[ F(x,y) * log p(y|x) ] + H(y | x) * E[ F(x,y) ]
-//
-// Computes the Shannon entropy H(y|x) of the log-linear model distribution
-// over the candidate set cands_ under feature weights `params`.  If `g` is
-// non-null, the gradient of the entropy w.r.t. the weights is also
-// accumulated into *g using the two-term decomposition quoted above.
-// Returns the entropy (in nats, since natural log is used throughout).
-double CandidateSetEntropy::operator()(const vector<double>& params,
- SparseVector<double>* g) const {
- prob_t z;
- vector<double> dps(cands_.size());
- // Pass 1: cache each candidate's unnormalized log score (feature dot
- // product) and accumulate the normalizer z = sum_i exp(dps[i]).
- for (unsigned i = 0; i < cands_.size(); ++i) {
- dps[i] = cands_[i].fmap.dot(params);
- const prob_t u(dps[i], init_lnx()); // NOTE(review): presumably constructs u = exp(dps[i]) in log space - confirm against prob_t ctor
- z += u;
- }
- const double log_z = log(z);
-
- SparseVector<double> exp_feats; // E[ F(x,y) ]: expected feature vector under p(y|x)
- double entropy = 0;
- // Pass 2: normalize each candidate's score by log_z, then accumulate
- // H = -sum_y p log p and (if requested) both gradient terms.
- for (unsigned i = 0; i < cands_.size(); ++i) {
- const double log_prob = cands_[i].fmap.dot(params) - log_z;
- const double prob = exp(log_prob);
- const double e_logprob = prob * log_prob; // p(y|x) * log p(y|x)
- entropy -= e_logprob;
- if (g) {
- (*g) += cands_[i].fmap * e_logprob; // term 1: E[ F(x,y) * log p(y|x) ]
- exp_feats += cands_[i].fmap * prob;
- }
- }
- if (g) (*g) += exp_feats * entropy; // term 2: H(y|x) * E[ F(x,y) ]
- return entropy;
-}
-
-}
-