| author | Chris Dyer <cdyer@cs.cmu.edu> | 2012-06-29 18:45:26 -0700 |
|---|---|---|
| committer | Chris Dyer <cdyer@cs.cmu.edu> | 2012-06-29 18:45:26 -0700 |
| commit | 3044d6d1c6d428e8d06c255e3a2d739bcd187679 | |
| tree | 81baa7315011a57ee363f93f0aa3e1e94affe5d1 /training/entropy.h | |
| parent | c84ef9590d11819b7f8441a53b1699a912d949e1 | |
add option for entropy optimization
Diffstat (limited to 'training/entropy.h')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | training/entropy.h | 22 |

1 file changed, 22 insertions, 0 deletions
```diff
diff --git a/training/entropy.h b/training/entropy.h
new file mode 100644
index 00000000..796589ca
--- /dev/null
+++ b/training/entropy.h
@@ -0,0 +1,22 @@
+#ifndef _CSENTROPY_H_
+#define _CSENTROPY_H_
+
+#include <vector>
+#include "sparse_vector.h"
+
+namespace training {
+  class CandidateSet;
+
+  class CandidateSetEntropy {
+   public:
+    explicit CandidateSetEntropy(const CandidateSet& cs) : cands_(cs) {}
+    // compute the entropy (expected log likelihood) of a CandidateSet
+    // (optional) the gradient of the entropy with respect to params
+    double operator()(const std::vector<double>& params,
+                      SparseVector<double>* g = NULL) const;
+   private:
+    const CandidateSet& cands_;
+  };
+};
+
+#endif
```
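The new header only declares the interface; the implementation of `operator()` is not part of this diff. For a log-linear distribution p(i) ∝ exp(params · f_i) over the candidates, the entropy is H = log Z − E[score], and its gradient with respect to the weights works out to E[score]·E[f] − E[score·f]. The sketch below illustrates one way such a computation could look; it is an assumption, not the project's code, and `Candidate` and `FeatVec` are hypothetical stand-ins for cdec's `training::CandidateSet` and `SparseVector<double>`.

```cpp
// Minimal sketch of a candidate-set entropy computation with optional gradient.
// Hypothetical stand-ins for cdec's CandidateSet / SparseVector; illustration only.
#include <cmath>
#include <map>
#include <vector>

typedef std::map<int, double> FeatVec;   // feature id -> value
struct Candidate { FeatVec feats; };     // one hypothesis with its features

// Entropy of the log-linear distribution p(i) = exp(params . f_i) / Z over cands.
// If g is non-NULL, also fills in dH/dparams = E[score] E[f] - E[score * f].
// Feature ids are assumed to index into params.
double CandidateSetEntropy(const std::vector<Candidate>& cands,
                           const std::vector<double>& params,
                           FeatVec* g) {
  const size_t n = cands.size();
  if (n == 0) return 0.0;
  std::vector<double> score(n);
  double max_score = 0.0;
  for (size_t i = 0; i < n; ++i) {
    double s = 0.0;
    for (FeatVec::const_iterator it = cands[i].feats.begin();
         it != cands[i].feats.end(); ++it)
      s += params[it->first] * it->second;
    score[i] = s;
    if (i == 0 || s > max_score) max_score = s;
  }
  // log Z via log-sum-exp for numerical stability
  double z = 0.0;
  for (size_t i = 0; i < n; ++i) z += std::exp(score[i] - max_score);
  const double log_z = std::log(z) + max_score;

  double e_score = 0.0;          // E[score] under p
  FeatVec e_feat, e_score_feat;  // E[f], E[score * f]
  for (size_t i = 0; i < n; ++i) {
    const double p = std::exp(score[i] - log_z);
    e_score += p * score[i];
    if (g) {
      for (FeatVec::const_iterator it = cands[i].feats.begin();
           it != cands[i].feats.end(); ++it) {
        e_feat[it->first] += p * it->second;
        e_score_feat[it->first] += p * score[i] * it->second;
      }
    }
  }
  if (g) {  // dH/dw_k = E[score] E[f_k] - E[score * f_k]
    g->clear();
    for (FeatVec::const_iterator it = e_feat.begin(); it != e_feat.end(); ++it)
      (*g)[it->first] = e_score * it->second - e_score_feat[it->first];
  }
  return log_z - e_score;  // H = log Z - E[score]
}
```

Accumulating E[f] and E[score·f] in sparse maps mirrors the `SparseVector<double>* g` gradient output declared in the header, so only features that actually fire on some candidate contribute to the gradient.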
