author	Jonathan Clark <jon.h.clark@gmail.com>	2011-05-03 12:13:21 -0400
committer	Jonathan Clark <jon.h.clark@gmail.com>	2011-05-03 12:13:21 -0400
commit	ae556ddeb17a9bd541cdaa7b19ba41773c91a5af (patch)
tree	5fe2306fac9a9999f73c0cfbf00518622c94dd1f /training
parent	18e422246474d890405c0ccfd0671ec7c1330f9f (diff)
Fix cross entropy and perplexity to use base-2 log throughout instead of log_e then 2^x
Diffstat (limited to 'training')
-rw-r--r--	training/model1.cc	11
1 file changed, 8 insertions, 3 deletions
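
For context, here is a minimal standalone sketch (not part of the commit) of the conversion this patch introduces. The likelihood and denominator values are hypothetical, and std::log here is the natural log, matching the unqualified log() used in model1.cc:

#include <cmath>
#include <iostream>

int main() {
  double likelihood = -1234.5;  // sum of log_e probabilities (nats); hypothetical value
  double denom = 1000.0;        // number of words scored; hypothetical value

  // log_2(x) = log_e(x) / log_e(2), so dividing by log(2) converts nats to bits.
  double base2_likelihood = likelihood / std::log(2.0);

  double cross_entropy = -base2_likelihood / denom;   // bits per word
  double perplexity = std::pow(2.0, cross_entropy);   // 2^(bits per word)

  // Equivalent in natural base: std::exp(-likelihood / denom) gives the same
  // value, which is why the old code's mix of log_e with pow(2.0, ...) was wrong.
  std::cout << "cross entropy: " << cross_entropy << " bits/word\n"
            << "perplexity:    " << perplexity << '\n';
  return 0;
}

Printing both the log_e and log_2 likelihoods, as the patch does below, makes the unit of the cross-entropy line (bits per word) unambiguous.
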
diff --git a/training/model1.cc b/training/model1.cc
index 4023735c..b9590ece 100644
--- a/training/model1.cc
+++ b/training/model1.cc
@@ -129,10 +129,15 @@ int main(int argc, char** argv) {
likelihood += log(sum) + src_logprob;
}
}
+
+  // log_2(x) = log_e(x) / log_e(2), so dividing by log(2) converts nats to bits
+ double base2_likelihood = likelihood / log(2);
+
if (flag) { cerr << endl; }
- cerr << " log likelihood: " << likelihood << endl;
- cerr << " cross entropy: " << (-likelihood / denom) << endl;
- cerr << " perplexity: " << pow(2.0, -likelihood / denom) << endl;
+ cerr << " log_e likelihood: " << likelihood << endl;
+ cerr << " log_2 likelihood: " << base2_likelihood << endl;
+ cerr << " cross entropy: " << (-base2_likelihood / denom) << endl;
+ cerr << " perplexity: " << pow(2.0, -base2_likelihood / denom) << endl;
if (!final_iteration) {
if (variational_bayes)
tt.NormalizeVB(alpha);