diff options
author:    Jonathan Clark <jon.h.clark@gmail.com>  2011-05-03 12:13:21 -0400
committer: Jonathan Clark <jon.h.clark@gmail.com>  2011-05-03 12:13:21 -0400
commit:    ae556ddeb17a9bd541cdaa7b19ba41773c91a5af (patch)
tree:      5fe2306fac9a9999f73c0cfbf00518622c94dd1f
parent:    18e422246474d890405c0ccfd0671ec7c1330f9f (diff)
Fix cross entropy and perplexity to use base2 log throughout instead of log_e then 2^x
 training/model1.cc (-rw-r--r--) | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/training/model1.cc b/training/model1.cc
index 4023735c..b9590ece 100644
--- a/training/model1.cc
+++ b/training/model1.cc
@@ -129,10 +129,15 @@ int main(int argc, char** argv) {
         likelihood += log(sum) + src_logprob;
       }
     }
+
+    // log(e) = 1.0
+    double base2_likelihood = likelihood / log(2);
+
     if (flag) { cerr << endl; }
-    cerr << "  log likelihood: " << likelihood << endl;
-    cerr << "   cross entropy: " << (-likelihood / denom) << endl;
-    cerr << "      perplexity: " << pow(2.0, -likelihood / denom) << endl;
+    cerr << "  log_e likelihood: " << likelihood << endl;
+    cerr << "  log_2 likelihood: " << base2_likelihood << endl;
+    cerr << "     cross entropy: " << (-base2_likelihood / denom) << endl;
+    cerr << "        perplexity: " << pow(2.0, -base2_likelihood / denom) << endl;
     if (!final_iteration) {
       if (variational_bayes)
         tt.NormalizeVB(alpha);