#ifndef _DTRAIN_SCORE_H_
#define _DTRAIN_SCORE_H_

#include "dtrain.h" // provides WordID, weight_t and the std:: names used below

namespace dtrain
{

/*
 * Per-order n-gram statistics: for each n in {1..N_}, 'sum' holds the
 * raw hypothesis n-gram count and 'clipped' the count clipped to the
 * maximum reference count. Counts are stored as weight_t so they can
 * be decayed (see ChiangBleuScorer below).
 */
struct NgramCounts
{
  size_t N_;
  map<size_t, weight_t> clipped;
  map<size_t, weight_t> sum;

  NgramCounts() : N_(0) {}

  NgramCounts(const size_t N) : N_(N) { zero(); }

  inline void
  operator+=(const NgramCounts& rhs)
  {
    if (rhs.N_ > N_) resize(rhs.N_);
    for (size_t i = 0; i < rhs.N_; i++) { // only orders rhs actually has
      this->clipped[i] += rhs.clipped.find(i)->second;
      this->sum[i] += rhs.sum.find(i)->second;
    }
  }

  inline void
  operator*=(const weight_t rhs)
  {
    for (size_t i = 0; i < N_; i++) {
      this->clipped[i] *= rhs;
      this->sum[i] *= rhs;
    }
  }

  inline void
  add(const size_t count, const size_t count_ref, const size_t i)
  {
    assert(i < N_);
    if (count > count_ref) {
      clipped[i] += count_ref;
    } else {
      clipped[i] += count;
    }
    sum[i] += count;
  }

  inline void
  zero()
  {
    for (size_t i = 0; i < N_; i++) {
      clipped[i] = 0.;
      sum[i] = 0.;
    }
  }

  inline void
  resize(size_t N)
  {
    if (N == N_) return;
    else if (N > N_) {
      for (size_t i = N_; i < N; i++) {
        clipped[i] = 0.;
        sum[i] = 0.;
      }
    } else { // N < N_
      for (size_t i = N_-1; i > N-1; i--) {
        clipped.erase(i);
        sum.erase(i);
      }
    }
    N_ = N;
  }
};

typedef map<vector<WordID>, size_t> Ngrams;

// all n-grams of vw up to length N, with occurrence counts
inline Ngrams
ngrams(const vector<WordID>& vw, const size_t N)
{
  Ngrams r;
  vector<WordID> ng;
  for (size_t i = 0; i < vw.size(); i++) {
    ng.clear();
    for (size_t j = i; j < min(i+N, vw.size()); j++) {
      ng.push_back(vw[j]);
      r[ng]++;
    }
  }

  return r;
}

// clipped counts of hyp's n-grams against a set of references
inline NgramCounts
ngram_counts(const vector<WordID>& hyp,
             const vector<Ngrams>& ngrams_ref,
             const size_t N)
{
  Ngrams ngrams_hyp = ngrams(hyp, N);
  NgramCounts counts(N);
  Ngrams::iterator it, ti;
  for (it = ngrams_hyp.begin(); it != ngrams_hyp.end(); it++) {
    size_t max_ref_count = 0;
    for (auto r: ngrams_ref) {
      ti = r.find(it->first);
      if (ti != r.end())
        max_ref_count = max(max_ref_count, ti->second);
    }
    counts.add(it->second, min(it->second, max_ref_count),
               it->first.size()-1);
  }

  return counts;
}
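/*
 * Worked example of the counting and clipping logic above (illustrative
 * only, not part of the original interface). For hyp = [7, 7, 7] and a
 * single reference [7, 8], with N = 2:
 *   ngrams(hyp, 2)    yields { [7]: 3, [7,7]: 2 }
 *   ngram_counts(...) records sum[0] = 3 but clipped[0] = 1, since the
 *                     unigram [7] occurs three times in the hypothesis
 *                     but only once in the reference, and sum[1] = 2,
 *                     clipped[1] = 0, since the bigram [7,7] never
 *                     occurs in the reference.
 * The clipped 1-gram precision is thus clipped[0]/sum[0] = 1/3.
 */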
/*
 * Base class for all scorers: shared BLEU machinery (uniform n-gram
 * weights, clipped counts, brevity penalty, best-match reference length).
 */
class Scorer
{
  protected:
    const size_t N_;
    vector<weight_t> w_;

  public:
    Scorer(size_t n) : N_(n)
    {
      for (size_t i = 1; i <= N_; i++)
        w_.push_back(1.0/N_);
    }

    virtual ~Scorer() {}

    inline bool
    init(const vector<WordID>& hyp,
         const vector<Ngrams>& reference_ngrams,
         const vector<size_t>& reference_lengths,
         size_t& hl, size_t& rl, size_t& M,
         vector<weight_t>& v, NgramCounts& counts)
    {
      hl = hyp.size();
      if (hl == 0) return false;
      rl = best_match_length(hl, reference_lengths);
      if (rl == 0) return false;
      counts = ngram_counts(hyp, reference_ngrams, N_);
      if (rl < N_) { // reference too short for full order: uniform weights up to rl
        M = rl;
        for (size_t i = 0; i < M; i++) v.push_back(1/((weight_t)M));
      } else {
        M = N_;
        v = w_;
      }

      return true;
    }

    inline weight_t
    brevity_penalty(const weight_t hl, const weight_t rl)
    {
      if (hl > rl)
        return 1;

      return exp(1 - rl/hl);
    }

    inline size_t
    best_match_length(const size_t hl,
                      const vector<size_t>& reference_lengths)
    {
      size_t m;
      if (reference_lengths.size() == 1) {
        m = reference_lengths.front();
      } else {
        size_t i = 0, best_idx = 0;
        size_t best = numeric_limits<size_t>::max();
        for (auto l: reference_lengths) {
          size_t d = (hl > l) ? hl-l : l-hl; // |hl-l| without unsigned underflow
          if (d < best) {
            best_idx = i;
            best = d;
          }
          i += 1;
        }
        m = reference_lengths[best_idx];
      }

      return m;
    }

    virtual weight_t
    score(const vector<WordID>&,
          const vector<Ngrams>&,
          const vector<size_t>&) = 0;

    virtual void
    update_context(const vector<WordID>& /*hyp*/,
                   const vector<Ngrams>& /*reference_ngrams*/,
                   const vector<size_t>& /*reference_lengths*/,
                   weight_t /*decay*/) {}
};

/*
 * ['fixed'] per-sentence BLEU
 * simply adds 'fix' (1) to the reference length for the calculation
 * of the BP, to avoid too short translations
 *
 * as in "Optimizing for Sentence-Level BLEU+1
 *        Yields Short Translations"
 * (Nakov et al. '12)
 *
 */
class NakovBleuScorer : public Scorer
{
  weight_t fix;

  public:
    NakovBleuScorer(size_t n, weight_t fix) : Scorer(n), fix(fix) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      if (!init(hyp, reference_ngrams, reference_lengths,
                hl, rl, M, v, counts))
        return 0.;
      weight_t sum=0, add=0;
      for (size_t i = 0; i < M; i++) {
        if (i > 0) add = 1; // add-one smoothing for all n > 1
        sum += v[i] * log(((weight_t)counts.clipped[i] + add)
                          / ((counts.sum[i] + add)));
      }

      return brevity_penalty(hl, rl+1) * exp(sum);
    }
};

/*
 * BLEU
 * 0 if the clipped count is 0 for any n \in {1..N}
 *
 * as in "BLEU: a Method for Automatic Evaluation
 *        of Machine Translation"
 * (Papineni et al. '02)
 *
 */
class PapineniBleuScorer : public Scorer
{
  public:
    PapineniBleuScorer(size_t n) : Scorer(n) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      if (!init(hyp, reference_ngrams, reference_lengths,
                hl, rl, M, v, counts))
        return 0.;
      weight_t sum = 0;
      for (size_t i = 0; i < M; i++) {
        if (counts.sum[i] == 0 || counts.clipped[i] == 0) return 0.;
        sum += v[i] * log((weight_t)counts.clipped[i] / counts.sum[i]);
      }

      return brevity_penalty(hl, rl) * exp(sum);
    }
};

/*
 * 'stupid' per-sentence BLEU
 * 0 iff no 1-gram match ('grounded'); higher-order
 * precisions are add-one smoothed
 *
 * as in "ORANGE: a Method for Evaluating Automatic
 *        Evaluation Metrics for Machine Translation"
 * (Lin & Och '04)
 *
 */
class LinBleuScorer : public Scorer
{
  public:
    LinBleuScorer(size_t n) : Scorer(n) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      if (!init(hyp, reference_ngrams, reference_lengths,
                hl, rl, M, v, counts))
        return 0.;
      weight_t sum=0, add=0;
      for (size_t i = 0; i < M; i++) {
        if (i == 0 && (counts.sum[i] == 0 || counts.clipped[i] == 0))
          return 0.;
        if (i == 1) add = 1;
        sum += v[i] * log(((weight_t)counts.clipped[i] + add)
                          / ((counts.sum[i] + add)));
      }

      return brevity_penalty(hl, rl) * exp(sum);
    }
};

/*
 * smooth BLEU
 * max. 0.9375 (with N=4)
 *
 * as in "An End-to-End Discriminative Approach
 *        to Machine Translation"
 * (Liang et al. '06)
 *
 */
class LiangBleuScorer : public Scorer
{
  public:
    LiangBleuScorer(size_t n) : Scorer(n) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl=hyp.size(), rl=best_match_length(hl, reference_lengths);
      if (hl == 0 || rl == 0) return 0.;
      NgramCounts counts = ngram_counts(hyp, reference_ngrams, N_);
      size_t M = N_;
      if (rl < N_) M = rl;
      weight_t sum = 0.;
      vector<weight_t> i_bleu;
      for (size_t i = 0; i < M; i++)
        i_bleu.push_back(0.);
      for (size_t i = 0; i < M; i++) {
        if (counts.sum[i] == 0 || counts.clipped[i] == 0) {
          break;
        } else {
          weight_t i_score = log((weight_t)counts.clipped[i] / counts.sum[i]);
          for (size_t j = i; j < M; j++)
            i_bleu[j] += (1/((weight_t)j+1)) * i_score;
        }
        sum += exp(i_bleu[i]) / pow(2.0, (weight_t)(N_-i));
      }

      return brevity_penalty(hl, rl) * sum;
    }
};

/*
 * approx. BLEU
 * scores the hypothesis against an exponentially decayed history of
 * n-gram counts and lengths accumulated from previous one-best
 * hypotheses (see the decay note at the end of this file)
 *
 * as in "Online Large-Margin Training of Syntactic
 *        and Structural Translation Features"
 * (Chiang et al. '08)
 *
 */
class ChiangBleuScorer : public Scorer
{
  private:
    NgramCounts context;
    weight_t hyp_sz_sum;
    weight_t ref_sz_sum;

  public:
    ChiangBleuScorer(size_t n) :
      Scorer(n), context(n), hyp_sz_sum(0), ref_sz_sum(0) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      if (!init(hyp, reference_ngrams, reference_lengths,
                hl, rl, M, v, counts))
        return 0.;
      counts += context;
      weight_t sum = 0;
      for (size_t i = 0; i < M; i++) {
        if (counts.sum[i] == 0 || counts.clipped[i] == 0) return 0.;
        sum += v[i] * log((weight_t)counts.clipped[i] / counts.sum[i]);
      }

      return brevity_penalty(hyp_sz_sum+hl, ref_sz_sum+rl) * exp(sum);
    }

    void
    update_context(const vector<WordID>& hyp,
                   const vector<Ngrams>& reference_ngrams,
                   const vector<size_t>& reference_lengths,
                   weight_t decay=0.9)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      init(hyp, reference_ngrams, reference_lengths,
           hl, rl, M, v, counts);

      context += counts;
      context *= decay;
      hyp_sz_sum += hl;
      hyp_sz_sum *= decay;
      ref_sz_sum += rl;
      ref_sz_sum *= decay;
    }
};

/*
 * 'sum' BLEU
 *
 * merely sums up geometrically weighted n-gram
 * precisions instead of multiplying them
 */
class SumBleuScorer : public Scorer
{
  public:
    SumBleuScorer(size_t n) : Scorer(n) {}

    weight_t
    score(const vector<WordID>& hyp,
          const vector<Ngrams>& reference_ngrams,
          const vector<size_t>& reference_lengths)
    {
      size_t hl, rl, M;
      vector<weight_t> v;
      NgramCounts counts;
      if (!init(hyp, reference_ngrams, reference_lengths,
                hl, rl, M, v, counts))
        return 0.;
      weight_t sum = 0.;
      size_t j = 1;
      for (size_t i = 0; i < M; i++) {
        if (counts.sum[i] == 0 || counts.clipped[i] == 0) break;
        sum += ((weight_t)counts.clipped[i] / counts.sum[i])
               / pow(2.0, (weight_t)(N_-j+1));
        j++;
      }

      return brevity_penalty(hl, rl) * sum;
    }
};

} // namespace dtrain

#endif
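/*
 * Usage sketch (illustrative, kept out of the build via #if 0): how a
 * driver such as dtrain.cc might score a single hypothesis against two
 * references. This assumes dtrain.h defines WordID as an integral token
 * id and weight_t as a floating-point type, as in cdec; the token ids
 * below are made up.
 */
#if 0
weight_t
example_sentence_bleu()
{
  using namespace dtrain;

  vector<WordID> hyp = {1, 2, 3, 4};
  vector<vector<WordID> > refs = {{1, 2, 3, 5}, {1, 2, 4}};

  // precompute reference n-grams and lengths once per segment
  const size_t N = 4; // standard BLEU order
  vector<Ngrams> reference_ngrams;
  vector<size_t> reference_lengths;
  for (auto& r: refs) {
    reference_ngrams.push_back(ngrams(r, N));
    reference_lengths.push_back(r.size());
  }

  // BLEU+1 per-sentence score (reference length + 1 in the BP)
  NakovBleuScorer scorer(N, 1);
  return scorer.score(hyp, reference_ngrams, reference_lengths);
}
#endif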
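/*
 * Decay note on ChiangBleuScorer::update_context() (illustrative):
 * with the default decay of 0.9, after updates with per-sentence
 * counts c1 and then c2 the stored context is
 *   ((0 + c1)*0.9 + c2)*0.9 = 0.81*c1 + 0.9*c2,
 * i.e. an exponentially decayed sum in which older one-best
 * hypotheses contribute less and less; hyp_sz_sum and ref_sz_sum
 * decay the same way.
 */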