diff options
Diffstat (limited to 'training')
 -rw-r--r--   training/Makefile.am |  1
 -rw-r--r--   training/entropy.cc  | 41
 -rw-r--r--   training/entropy.h   | 22
3 files changed, 64 insertions, 0 deletions
diff --git a/training/Makefile.am b/training/Makefile.am index 68ebfab4..4cef0d5b 100644 --- a/training/Makefile.am +++ b/training/Makefile.am @@ -26,6 +26,7 @@ TESTS = lbfgs_test optimize_test  noinst_LIBRARIES = libtraining.a  libtraining_a_SOURCES = \    candidate_set.cc \ +  entropy.cc \    optimize.cc \    online_optimizer.cc \    risk.cc diff --git a/training/entropy.cc b/training/entropy.cc new file mode 100644 index 00000000..4fdbe2be --- /dev/null +++ b/training/entropy.cc @@ -0,0 +1,41 @@ +#include "entropy.h" + +#include "prob.h" +#include "candidate_set.h" + +using namespace std; + +namespace training { + +// see Mann and McCallum "Efficient Computation of Entropy Gradient ..." for +// a mostly clear derivation of: +//   g = E[ F(x,y) * log p(y|x) ] + H(y | x) * E[ F(x,y) ] +double CandidateSetEntropy::operator()(const vector<double>& params, +                                       SparseVector<double>* g) const { +  prob_t z; +  vector<double> dps(cands_.size()); +  for (unsigned i = 0; i < cands_.size(); ++i) { +    dps[i] = cands_[i].fmap.dot(params); +    const prob_t u(dps[i], init_lnx()); +    z += u; +  } +  const double log_z = log(z); + +  SparseVector<double> exp_feats; +  double entropy = 0; +  for (unsigned i = 0; i < cands_.size(); ++i) { +    const double log_prob = cands_[i].fmap.dot(params) - log_z; +    const double prob = exp(log_prob); +    const double e_logprob = prob * log_prob; +    entropy -= e_logprob; +    if (g) { +      (*g) += cands_[i].fmap * e_logprob; +      exp_feats += cands_[i].fmap * prob; +    } +  } +  if (g) (*g) += exp_feats * entropy; +  return entropy; +} + +} + diff --git a/training/entropy.h b/training/entropy.h new file mode 100644 index 00000000..796589ca --- /dev/null +++ b/training/entropy.h @@ -0,0 +1,22 @@ +#ifndef _CSENTROPY_H_ +#define _CSENTROPY_H_ + +#include <vector> +#include "sparse_vector.h" + +namespace training { +  class CandidateSet; + +  class CandidateSetEntropy { +   public: +    explicit 
CandidateSetEntropy(const CandidateSet& cs) : cands_(cs) {} +    // compute the entropy (expected log likelihood) of a CandidateSet +    // (optional) the gradient of the entropy with respect to params +    double operator()(const std::vector<double>& params, +                      SparseVector<double>* g = NULL) const; +   private: +    const CandidateSet& cands_; +  }; +}; + +#endif
