From 1b8181bf0d6e9137e6b9ccdbe414aec37377a1a9 Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Sun, 18 Nov 2012 13:35:42 -0500
Subject: major restructure of the training code

---
 training/utils/entropy.cc | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 training/utils/entropy.cc

(limited to 'training/utils/entropy.cc')

diff --git a/training/utils/entropy.cc b/training/utils/entropy.cc
new file mode 100644
index 00000000..4fdbe2be
--- /dev/null
+++ b/training/utils/entropy.cc
@@ -0,0 +1,41 @@
+#include "entropy.h"
+
+#include "prob.h"
+#include "candidate_set.h"
+
+using namespace std;
+
+namespace training {
+
+// see Mann and McCallum "Efficient Computation of Entropy Gradient ..." for
+// a mostly clear derivation of:
+//   g = E[ F(x,y) * log p(y|x) ] + H(y | x) * E[ F(x,y) ]
+double CandidateSetEntropy::operator()(const vector<double>& params,
+                                       SparseVector<double>* g) const {
+  prob_t z;
+  vector<double> dps(cands_.size());
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    dps[i] = cands_[i].fmap.dot(params);
+    const prob_t u(dps[i], init_lnx());
+    z += u;
+  }
+  const double log_z = log(z);
+
+  SparseVector<double> exp_feats;
+  double entropy = 0;
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    const double log_prob = cands_[i].fmap.dot(params) - log_z;
+    const double prob = exp(log_prob);
+    const double e_logprob = prob * log_prob;
+    entropy -= e_logprob;
+    if (g) {
+      (*g) += cands_[i].fmap * e_logprob;
+      exp_feats += cands_[i].fmap * prob;
+    }
+  }
+  if (g) (*g) += exp_feats * entropy;
+  return entropy;
+}
+
+}
--
cgit v1.2.3
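
Notes (not part of the commit): the gradient formula in the comment follows
from Mann and McCallum in a few lines. In the comment's own notation, with
p(y|x) = exp(params . F(x,y)) / Z(x) and all expectations taken under p(y|x):

  dp(y|x)/dparams = p(y|x) * ( F(x,y) - E[ F(x,y) ] )

  dH/dparams = - sum_y dp(y|x)/dparams * ( log p(y|x) + 1 )
             = - sum_y dp(y|x)/dparams * log p(y|x)    (sum_y dp/dparams = 0)
             = - E[ F(x,y) * log p(y|x) ] + E[ F(x,y) ] * sum_y p(y|x) log p(y|x)
             = - ( E[ F(x,y) * log p(y|x) ] + H(y|x) * E[ F(x,y) ] )

so the g of the comment, which is what operator() accumulates, is the negative
of dH/dparams; whether a caller wants H's gradient or its negation is a sign
convention this file leaves to its users.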
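
The first loop accumulates the partition function Z in the log-domain prob_t
type: the prob_t u(dps[i], init_lnx()) constructor treats dps[i] as a log
value, so the potentially huge exp(params . F) terms are never materialized.
A minimal standalone sketch of the equivalent log-sum-exp computation,
assuming only that prob_t stores logarithms and adds in the log domain:

  // log_sum_exp.cc -- standalone sketch; not part of the patch.
  #include <algorithm>
  #include <cmath>
  #include <vector>

  // Returns log(sum_i exp(log_vals[i])) without ever forming exp(log_vals[i]),
  // which is what summing prob_t(dps[i], init_lnx()) values achieves.
  // Assumes log_vals is non-empty.
  double LogSumExp(const std::vector<double>& log_vals) {
    const double m = *std::max_element(log_vals.begin(), log_vals.end());
    double s = 0;
    for (unsigned i = 0; i < log_vals.size(); ++i)
      s += std::exp(log_vals[i] - m);  // every term is <= 1, so no overflow
    return m + std::log(s);
  }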
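
And a hypothetical caller, to show how operator() is meant to be used. The
CandidateSetEntropy constructor, the FD::NumFeats() weight-vector sizing, and
the training::CandidateSet interface are assumptions about the surrounding
cdec headers (entropy.h, candidate_set.h, fdict.h), which this patch does not
show:

  // entropy_example.cc -- hypothetical usage sketch; API details assumed.
  #include <vector>

  #include "candidate_set.h"
  #include "entropy.h"
  #include "fdict.h"
  #include "sparse_vector.h"

  void EntropyExample(const training::CandidateSet& kbest) {
    // All-zero weights make every candidate equally likely, so the entropy
    // returned should be log(kbest.size()) -- its maximum -- and the
    // accumulated gradient should be (numerically) the zero vector.
    std::vector<double> weights(FD::NumFeats(), 0.0);  // FD::NumFeats() assumed

    training::CandidateSetEntropy entropy(kbest);      // ctor signature assumed
    SparseVector<double> g;
    const double h = entropy(weights, &g);
    (void)h;  // h == log(kbest.size()) up to floating-point error
  }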