From 1d5a0055a948663d799b4c5b1380ce1d9742bf6b Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Mon, 5 Mar 2012 14:51:04 -0500
Subject: support strength=0 PYPs, final notation clean-up

---
 gi/pf/align-lexonly-pyp.cc |  2 +-
 gi/pf/conditional_pseg.h   |  2 +-
 gi/pf/learn_cfg.cc         |  4 ++--
 gi/pf/pyp_lm.cc            | 22 +++++++++++-----------
 4 files changed, 15 insertions(+), 15 deletions(-)

(limited to 'gi')

diff --git a/gi/pf/align-lexonly-pyp.cc b/gi/pf/align-lexonly-pyp.cc
index 4ce7cf62..87f7f6b5 100644
--- a/gi/pf/align-lexonly-pyp.cc
+++ b/gi/pf/align-lexonly-pyp.cc
@@ -104,7 +104,7 @@ struct HierarchicalWordBase {
   }
 
   void Summary() const {
-    cerr << "NUMBER OF CUSTOMERS: " << r.num_customers() << " (d=" << r.discount() << ",\\alpha=" << r.alpha() << ')' << endl;
+    cerr << "NUMBER OF CUSTOMERS: " << r.num_customers() << " (d=" << r.discount() << ",s=" << r.strength() << ')' << endl;
     for (MFCR<vector<WordID> >::const_iterator it = r.begin(); it != r.end(); ++it)
       cerr << " " << it->second.total_dish_count_ << " (on " << it->second.table_counts_.size() << " tables)" << TD::GetString(it->first) << endl;
   }
diff --git a/gi/pf/conditional_pseg.h b/gi/pf/conditional_pseg.h
index f9841cbf..86403d8d 100644
--- a/gi/pf/conditional_pseg.h
+++ b/gi/pf/conditional_pseg.h
@@ -22,7 +22,7 @@ struct MConditionalTranslationModel {
   void Summary() const {
     std::cerr << "Number of conditioning contexts: " << r.size() << std::endl;
     for (RuleModelHash::const_iterator it = r.begin(); it != r.end(); ++it) {
-      std::cerr << TD::GetString(it->first) << " \t(d=" << it->second.discount() << ",\\alpha = " << it->second.alpha() << ") --------------------------" << std::endl;
+      std::cerr << TD::GetString(it->first) << " \t(d=" << it->second.discount() << ",s=" << it->second.strength() << ") --------------------------" << std::endl;
       for (MFCR<TRule>::const_iterator i2 = it->second.begin(); i2 != it->second.end(); ++i2)
         std::cerr << " " << -1 << '\t' << i2->first << std::endl;
     }
diff --git a/gi/pf/learn_cfg.cc b/gi/pf/learn_cfg.cc
index 5b748311..bf157828 100644
--- a/gi/pf/learn_cfg.cc
+++ b/gi/pf/learn_cfg.cc
@@ -183,9 +183,9 @@ struct HieroLMModel {
       nts[i].resample_hyperparameters(rng);
     if (kHIERARCHICAL_PRIOR) {
       q0.resample_hyperparameters(rng);
-      cerr << "[base d=" << q0.discount() << ", alpha=" << q0.alpha() << "]";
+      cerr << "[base d=" << q0.discount() << ", s=" << q0.strength() << "]";
     }
-    cerr << " d=" << nts[0].discount() << ", alpha=" << nts[0].alpha() << endl;
+    cerr << " d=" << nts[0].discount() << ", s=" << nts[0].strength() << endl;
   }
 
   const BaseRuleModel base;
diff --git a/gi/pf/pyp_lm.cc b/gi/pf/pyp_lm.cc
index e5c44c8b..7ebada13 100644
--- a/gi/pf/pyp_lm.cc
+++ b/gi/pf/pyp_lm.cc
@@ -78,14 +78,14 @@ template <unsigned N> struct PYPLM {
       backoff(vs, da, db, ss, sr),
       discount_a(da), discount_b(db),
       strength_s(ss), strength_r(sr),
-      d(0.8), alpha(1.0), lookup(N-1) {}
+      d(0.8), strength(1.0), lookup(N-1) {}
   void increment(WordID w, const vector<WordID>& context, MT19937* rng) {
     const double bo = backoff.prob(w, context);
     for (unsigned i = 0; i < N-1; ++i)
       lookup[i] = context[context.size() - 1 - i];
     typename unordered_map<vector<WordID>, CCRP<WordID>, boost::hash<vector<WordID> > >::iterator it = p.find(lookup);
     if (it == p.end())
-      it = p.insert(make_pair(lookup, CCRP<WordID>(d,alpha))).first;
+      it = p.insert(make_pair(lookup, CCRP<WordID>(d,strength))).first;
     if (it->second.increment(w, bo, rng))
       backoff.increment(w, context, rng);
   }
@@ -107,7 +107,7 @@ template <unsigned N> struct PYPLM {
   }
 
   double log_likelihood() const {
-    return log_likelihood(d, alpha) + backoff.log_likelihood();
+    return log_likelihood(d, strength) + backoff.log_likelihood();
   }
 
   double log_likelihood(const double& dd, const double& aa) const {
@@ -125,15 +125,15 @@ template <unsigned N> struct PYPLM {
     DiscountResampler(const PYPLM& m) : m_(m) {}
     const PYPLM& m_;
     double operator()(const double& proposed_discount) const {
-      return m_.log_likelihood(proposed_discount, m_.alpha);
+      return m_.log_likelihood(proposed_discount, m_.strength);
     }
   };
 
   struct AlphaResampler {
     AlphaResampler(const PYPLM& m) : m_(m) {}
     const PYPLM& m_;
-    double operator()(const double& proposed_alpha) const {
-      return m_.log_likelihood(m_.d, proposed_alpha);
+    double operator()(const double& proposed_strength) const {
+      return m_.log_likelihood(m_.d, proposed_strength);
     }
   };
 
@@ -141,25 +141,25 @@ template <unsigned N> struct PYPLM {
     DiscountResampler dr(*this);
     AlphaResampler ar(*this);
     for (int iter = 0; iter < nloop; ++iter) {
-      alpha = slice_sampler1d(ar, alpha, *rng, 0.0,
+      strength = slice_sampler1d(ar, strength, *rng, 0.0,
                  std::numeric_limits<double>::infinity(), 0.0, niterations, 100*niterations);
       d = slice_sampler1d(dr, d, *rng, std::numeric_limits<double>::min(),
                  1.0, 0.0, niterations, 100*niterations);
     }
-    alpha = slice_sampler1d(ar, alpha, *rng, 0.0,
+    strength = slice_sampler1d(ar, strength, *rng, 0.0,
                  std::numeric_limits<double>::infinity(), 0.0, niterations, 100*niterations);
     typename unordered_map<vector<WordID>, CCRP<WordID>, boost::hash<vector<WordID> > >::iterator it;
-    cerr << "PYPLM<" << N << ">(d=" << d << ",a=" << alpha << ") = " << log_likelihood(d, alpha) << endl;
+    cerr << "PYPLM<" << N << ">(d=" << d << ",a=" << strength << ") = " << log_likelihood(d, strength) << endl;
     for (it = p.begin(); it != p.end(); ++it) {
       it->second.set_discount(d);
-      it->second.set_alpha(alpha);
+      it->second.set_strength(strength);
     }
     backoff.resample_hyperparameters(rng, nloop, niterations);
   }
 
   PYPLM<N-1> backoff;
   double discount_a, discount_b, strength_s, strength_r;
-  double d, alpha;
+  double d, strength;
   mutable vector<WordID> lookup;  // thread-local
   unordered_map<vector<WordID>, CCRP<WordID>, boost::hash<vector<WordID> > > p;
 };
--
cgit v1.2.3