From 535cfe8da7c6107d8415afd1381d59f2a6b9844f Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Sun, 24 Jun 2012 15:52:30 -0400
Subject: compute risk / gradient of risk

---
 training/risk.cc | 43 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 training/risk.cc

(limited to 'training/risk.cc')

diff --git a/training/risk.cc b/training/risk.cc
new file mode 100644
index 00000000..347ed3cb
--- /dev/null
+++ b/training/risk.cc
@@ -0,0 +1,43 @@
+#include "risk.h"
+
+#include "prob.h"
+#include "candidate_set.h"
+#include "ns.h"
+
+using namespace std;
+
+namespace training {
+
+// g = \sum_e p(e|f) * loss(e) * (phi(e,f) - E[phi(e,f)])
+double CandidateSetRisk::operator()(const vector<double>& params,
+                                    SparseVector<double>* g) const {
+  prob_t z;
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    const prob_t u(cands_[i].fmap.dot(params), init_lnx());
+    z += u;
+  }
+  const double log_z = log(z);
+
+  SparseVector<double> exp_feats;
+  if (g) {
+    for (unsigned i = 0; i < cands_.size(); ++i) {
+      const double log_prob = cands_[i].fmap.dot(params) - log_z;
+      const double prob = exp(log_prob);
+      exp_feats += cands_[i].fmap * prob;
+    }
+  }
+
+  double risk = 0;
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    const double log_prob = cands_[i].fmap.dot(params) - log_z;
+    const double prob = exp(log_prob);
+    const double r = prob * metric_.ComputeScore(cands_[i].eval_feats);
+    risk += r;
+    if (g) (*g) += (cands_[i].fmap - exp_feats) * r;
+  }
+  return risk;
+}
+
+}
+
+
--
cgit v1.2.3
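
The gradient accumulated into *g above comes from differentiating the expected loss (risk) under a log-linear candidate distribution. The following is a brief sketch of that derivation, with p_w(e|f), loss(e), and \phi(e,f) standing for the same quantities named in the code comment; it assumes the standard softmax form for p(e|f), which is what the log_z / exp(log_prob) computation implements:

  R(w) = \sum_e p_w(e|f)\,\mathrm{loss}(e),
  \qquad
  p_w(e|f) = \frac{\exp\!\left(w^\top \phi(e,f)\right)}{\sum_{e'} \exp\!\left(w^\top \phi(e',f)\right)}

  \nabla_w\, p_w(e|f) = p_w(e|f)\left(\phi(e,f) - \mathbb{E}_{p_w}[\phi]\right)

  \nabla_w R(w) = \sum_e \mathrm{loss}(e)\,\nabla_w\, p_w(e|f)
               = \sum_e p_w(e|f)\,\mathrm{loss}(e)\left(\phi(e,f) - \mathbb{E}_{p_w}[\phi]\right)

In the code, exp_feats is \mathbb{E}_{p_w}[\phi] (the expected feature vector under the model), r is p_w(e|f) * loss(e) for one candidate (metric_.ComputeScore(cands_[i].eval_feats) supplies the loss(e) value here), and each candidate adds r * (fmap - exp_feats) to the gradient, matching the last line of the derivation.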