From 148c0d57317e097f94a1562452bc50cdc23408dc Mon Sep 17 00:00:00 2001
From: Jonathan Clark
Date: Tue, 3 May 2011 12:13:21 -0400
Subject: Fix cross entropy and perplexity to use base2 log throughout instead
 of log_e then 2^x

---
 training/model1.cc | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/training/model1.cc b/training/model1.cc
index 4023735c..b9590ece 100644
--- a/training/model1.cc
+++ b/training/model1.cc
@@ -129,10 +129,15 @@ int main(int argc, char** argv) {
         likelihood += log(sum) + src_logprob;
       }
     }
+
+    // log(e) = 1.0
+    double base2_likelihood = likelihood / log(2);
+
     if (flag) { cerr << endl; }
-    cerr << "  log likelihood: " << likelihood << endl;
-    cerr << "   cross entropy: " << (-likelihood / denom) << endl;
-    cerr << "      perplexity: " << pow(2.0, -likelihood / denom) << endl;
+    cerr << "  log_e likelihood: " << likelihood << endl;
+    cerr << "  log_2 likelihood: " << base2_likelihood << endl;
+    cerr << "     cross entropy: " << (-base2_likelihood / denom) << endl;
+    cerr << "        perplexity: " << pow(2.0, -base2_likelihood / denom) << endl;
     if (!final_iteration) {
       if (variational_bayes)
         tt.NormalizeVB(alpha);
--
cgit v1.2.3
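
The fix boils down to a change of base: `likelihood` is accumulated in nats (sums of `log()` outputs), so it must be divided by `log(2)` before being exponentiated with base 2. The standalone sketch below is not from the cdec sources; the likelihood and word-count values are invented for illustration. It shows the conversion and why the old mixed-base computation (base-2 exponentiation of a base-e cross entropy) understated the perplexity.

```cpp
#include <cmath>
#include <iostream>

int main() {
  // Hypothetical stand-ins for the quantities accumulated in model1.cc:
  // `likelihood` is a sum of natural-log probabilities (nats),
  // `denom` is the number of target words.
  const double likelihood = -3456.78;
  const double denom = 1000.0;

  // Change of base: log_2(x) = log_e(x) / log_e(2).
  const double base2_likelihood = likelihood / std::log(2);

  // Cross entropy in bits per word; perplexity is 2 raised to that power.
  const double xent = -base2_likelihood / denom;
  const double ppl = std::pow(2.0, xent);

  // Consistency check: 2^(-base2_ll/denom) equals e^(-ll/denom).
  const double ppl_via_e = std::exp(-likelihood / denom);

  // What the pre-fix code printed: base-2 exponentiation of a base-e
  // cross entropy, which understates the true perplexity.
  const double ppl_old = std::pow(2.0, -likelihood / denom);

  std::cout << "cross entropy (bits/word): " << xent << "\n"
            << "perplexity (fixed):        " << ppl << "\n"
            << "perplexity via exp():      " << ppl_via_e << "\n"
            << "perplexity (old, buggy):   " << ppl_old << "\n";
  return 0;
}
```

With these made-up numbers, the fixed perplexity (about 31.7) matches `exp(-likelihood / denom)` exactly, while the old computation reports roughly 11.0. Keeping the accumulation in nats and converting once at the end, as the patch does, avoids a division inside the inner loop.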