author Jonathan Clark <jon.h.clark@gmail.com> 2011-05-03 12:13:21 -0400
committer Jonathan Clark <jon.h.clark@gmail.com> 2011-05-03 12:13:21 -0400
commit 148c0d57317e097f94a1562452bc50cdc23408dc (patch)
tree 9b973742f231b46eaf677b9084f012afff7a2ee8 /training
parent e90d7179326d467d5b183645692be71ebfa9e070 (diff)
Fix cross entropy and perplexity to use base2 log throughout instead of log_e then 2^x
Diffstat (limited to 'training')
-rw-r--r-- training/model1.cc | 11
1 file changed, 8 insertions(+), 3 deletions(-)
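
The bug being fixed: likelihood is accumulated with the natural log, so -likelihood/denom is a cross entropy in nats, yet the old code exponentiated it with pow(2.0, ...) as if it were in bits. A minimal standalone sketch of the corrected base-2 computation (the main() wrapper and the numeric values are illustrative, not from model1.cc):

    #include <cmath>
    #include <cstdio>

    int main() {
      double likelihood = -3465.74;  // accumulated natural-log likelihood (nats), illustrative
      double denom = 1000.0;         // number of target tokens, illustrative

      // Convert once at the end: log2(x) = ln(x) / ln(2).
      double base2_likelihood = likelihood / std::log(2.0);

      // Cross entropy in bits per token; perplexity = 2^H.
      double cross_entropy = -base2_likelihood / denom;  // ~5.0 bits/token
      double perplexity = std::pow(2.0, cross_entropy);  // ~32.0

      std::printf("cross entropy: %f\nperplexity: %f\n", cross_entropy, perplexity);
      return 0;
    }
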
diff --git a/training/model1.cc b/training/model1.cc
index 4023735c..b9590ece 100644
--- a/training/model1.cc
+++ b/training/model1.cc
@@ -129,10 +129,15 @@ int main(int argc, char** argv) {
likelihood += log(sum) + src_logprob;
}
}
+
+ // convert nats to bits: log2(x) = ln(x) / ln(2)
+ double base2_likelihood = likelihood / log(2);
+
if (flag) { cerr << endl; }
- cerr << " log likelihood: " << likelihood << endl;
- cerr << " cross entropy: " << (-likelihood / denom) << endl;
- cerr << " perplexity: " << pow(2.0, -likelihood / denom) << endl;
+ cerr << " log_e likelihood: " << likelihood << endl;
+ cerr << " log_2 likelihood: " << base2_likelihood << endl;
+ cerr << " cross entropy: " << (-base2_likelihood / denom) << endl;
+ cerr << " perplexity: " << pow(2.0, -base2_likelihood / denom) << endl;
if (!final_iteration) {
if (variational_bayes)
tt.NormalizeVB(alpha);
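
Sanity check on the fix (not part of the commit): before it, perplexity was computed as pow(2.0, H) with H in nats, i.e. e^(H * ln 2), which understates the true value e^H whenever H > 0; after it, the exponent is H / ln 2 bits, and 2^(H / ln 2) = e^H as expected.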