Diffstat (limited to 'gi')
-rw-r--r--  gi/pf/align-lexonly-pyp.cc  1
-rw-r--r--  gi/pf/pyp_tm.cc             7
-rw-r--r--  gi/pf/pyp_word_model.h      2
3 files changed, 7 insertions, 3 deletions
diff --git a/gi/pf/align-lexonly-pyp.cc b/gi/pf/align-lexonly-pyp.cc
index d68a4b8f..4a1d1db6 100644
--- a/gi/pf/align-lexonly-pyp.cc
+++ b/gi/pf/align-lexonly-pyp.cc
@@ -208,6 +208,7 @@ int main(int argc, char** argv) {
   }
   for (unsigned i = 0; i < corpus.size(); ++i)
     WriteAlignments(corpus[i]);
+  aligner.model.Summary();
   return 0;
 }
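
The added call prints model diagnostics once all alignments have been written. A minimal sketch of the pattern, assuming only what the hunks in this commit show (a const Summary() method on the model owned by the aligner); the ToyModel type and its fields below are illustrative stand-ins, not cdec's API:

// Sketch only: a model exposing a side-effect-free Summary() that is
// called once at the end of main(), mirroring the hunk above.
#include <iostream>

struct ToyModel {                    // illustrative stand-in for aligner.model
  unsigned customers = 0;            // stand-ins for CRP statistics
  unsigned tables = 0;
  void Summary() const {             // diagnostic dump, no state changes
    std::cerr << "customers=" << customers << " tables=" << tables << std::endl;
  }
};

int main() {
  ToyModel model;
  // ... training and writing alignments would run here ...
  model.Summary();                   // print statistics before exiting
  return 0;
}
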
diff --git a/gi/pf/pyp_tm.cc b/gi/pf/pyp_tm.cc
index 94cbe7c3..b5262f47 100644
--- a/gi/pf/pyp_tm.cc
+++ b/gi/pf/pyp_tm.cc
@@ -54,8 +54,6 @@ struct ConditionalPYPWordModel {
     assert(it != r.end());
     if (it->second.decrement(trglets, rng)) {
       base.Decrement(trglets, rng);
-      if (it->second.num_customers() == 0)
-        r.erase(it);
     }
   }
@@ -84,6 +82,11 @@ PYPLexicalTranslation::PYPLexicalTranslation(const vector<vector<WordID> >& lets
     tmodel(new ConditionalPYPWordModel<PYPWordModel>(up0)),
     kX(-TD::Convert("X")) {}
 
+void PYPLexicalTranslation::Summary() const {
+  tmodel->Summary();
+  up0->Summary();
+}
+
 prob_t PYPLexicalTranslation::Likelihood() const {
   prob_t p = up0->Likelihood();
   p *= tmodel->Likelihood();
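
The new PYPLexicalTranslation::Summary() simply forwards to the conditional word model and its base. Note that with the erase removed in the first hunk, restaurants whose customer count drops to zero now remain in the map. The body of ConditionalPYPWordModel::Summary() is not part of this diff; the sketch below is a hypothetical per-source-word dump, assuming a map from source word id to a CRP exposing num_customers() (seen in the removed lines) and a num_tables() accessor (assumed):

// Hypothetical sketch, not the actual cdec implementation: print one line
// of CRP statistics per conditioning source word.
#include <iostream>
#include <map>

template <typename CRP>
void PrintConditionalSummary(const std::map<unsigned, CRP>& r) {
  for (typename std::map<unsigned, CRP>::const_iterator it = r.begin(); it != r.end(); ++it) {
    std::cerr << "src=" << it->first                       // conditioning source word id
              << " customers=" << it->second.num_customers()
              << " tables=" << it->second.num_tables()     // assumed accessor
              << std::endl;
  }
}
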
diff --git a/gi/pf/pyp_word_model.h b/gi/pf/pyp_word_model.h
index 800a4fd7..ff366865 100644
--- a/gi/pf/pyp_word_model.h
+++ b/gi/pf/pyp_word_model.h
@@ -12,7 +12,7 @@
 // PYP(d,s,poisson-uniform) represented as a CRP
 struct PYPWordModel {
-  explicit PYPWordModel(const unsigned vocab_e_size, const double mean_len = 7.5) :
+  explicit PYPWordModel(const unsigned vocab_e_size, const double mean_len = 5) :
     base(prob_t::One()), r(1,1,1,1,0.66,50.0), u0(-std::log(vocab_e_size)), mean_length(mean_len) {}
 
   void ResampleHyperparameters(MT19937* rng);
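
The only functional change here is the default expected word length dropping from 7.5 to 5 letters. For context, the sketch below shows how a poisson-uniform base of the kind named in the comment above typically scores a letter sequence, with mean_len entering through the Poisson length term and u0 = -log(vocab_e_size) acting as the per-letter log probability; this illustrates the named distribution, not cdec's exact code:

// Illustrative only: log-probability of a word of `len` letters under a
// Poisson(mean_len) length distribution with letters drawn uniformly
// from a vocabulary of size vocab_e_size.
#include <cmath>

double LogPoissonUniformBase(unsigned len, double mean_len, unsigned vocab_e_size) {
  // log Poisson(len; mean_len) = len*log(mean_len) - mean_len - log(len!)
  const double log_poisson =
      len * std::log(mean_len) - mean_len - std::lgamma(len + 1.0);
  const double u0 = -std::log(static_cast<double>(vocab_e_size));  // log(1/V)
  return log_poisson + len * u0;  // length term plus independent uniform letters
}
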