author     Patrick Simianer <p@simianer.de>    2012-04-23 21:44:02 +0200
committer  Patrick Simianer <p@simianer.de>    2012-04-23 21:44:02 +0200
commit     1c733723583862a5fcee5352433022008a9dc4e0 (patch)
tree       d4f9a17498d6bc9210e0e3cebbcfc3635736b669 /utils
parent     d3aa71d1095a5c45c1d3ca3155259e5fe0b58df2 (diff)
parent     44508c1ad1bf88b1568713317b4a1e0be78804f8 (diff)
Merge remote-tracking branch 'upstream/master'
Conflicts:
Makefile.am
configure.ac
Diffstat (limited to 'utils')
-rw-r--r--  utils/alias_sampler.h    |  50
-rw-r--r--  utils/logval.h           |  13
-rw-r--r--  utils/unigram_pyp_lm.cc  | 168
-rw-r--r--  utils/weights.cc         |   4
4 files changed, 231 insertions, 4 deletions
diff --git a/utils/alias_sampler.h b/utils/alias_sampler.h
new file mode 100644
index 00000000..81541f7a
--- /dev/null
+++ b/utils/alias_sampler.h
@@ -0,0 +1,50 @@
+#ifndef _ALIAS_SAMPLER_H_
+#define _ALIAS_SAMPLER_H_
+
+#include <vector>
+#include <limits>
+
+// R. A. Kronmal and A. V. Peterson, Jr. (1979) On the alias method for
+// generating random variables from a discrete distribution. In The American
+// Statistician, Vol. 33, No. 4. Pages 214--218.
+//
+// Intuition: a multinomial with N outcomes can be rewritten as a uniform
+// mixture of N Bernoulli distributions. The ith Bernoulli returns i with
+// probability F[i], otherwise it returns an "alias" value L[i]. The
+// constructor computes the F's and L's given an arbitrary multinomial p in
+// O(n) time and Draw returns samples in O(1) time.
+struct AliasSampler {
+  AliasSampler() {}
+  explicit AliasSampler(const std::vector<double>& p) { Init(p); }
+  void Init(const std::vector<double>& p) {
+    const unsigned N = p.size();
+    cutoffs_.resize(p.size());
+    aliases_.clear();
+    aliases_.resize(p.size(), std::numeric_limits<unsigned>::max());
+    std::vector<unsigned> s,g;
+    for (unsigned i = 0; i < N; ++i) {
+      const double cutoff = cutoffs_[i] = N * p[i];
+      if (cutoff >= 1.0) g.push_back(i); else s.push_back(i);
+    }
+    while(!s.empty() && !g.empty()) {
+      const unsigned k = g.back();
+      const unsigned j = s.back();
+      aliases_[j] = k;
+      cutoffs_[k] -= 1.0 - cutoffs_[j];
+      s.pop_back();
+      if (cutoffs_[k] < 1.0) {
+        g.pop_back();
+        s.push_back(k);
+      }
+    }
+  }
+  template <typename Uniform01Generator>
+  unsigned Draw(Uniform01Generator& u01) const {
+    const unsigned n = u01() * cutoffs_.size();
+    if (u01() > cutoffs_[n]) return aliases_[n]; else return n;
+  }
+  std::vector<double> cutoffs_;    // F
+  std::vector<unsigned> aliases_;  // L
+};
+
+#endif
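
A minimal usage sketch for the new header (illustrative, not part of the commit): Draw() accepts any callable returning a uniform double in [0,1), so the Rand01 functor below is a hypothetical stand-in for whatever variate source the caller already has.

// Sketch: drawing from the multinomial {0.2, 0.5, 0.3} with AliasSampler.
#include <cstdlib>
#include <iostream>
#include <vector>
#include "alias_sampler.h"

struct Rand01 {  // stand-in uniform-[0,1) source
  double operator()() const { return std::rand() / (RAND_MAX + 1.0); }
};

int main() {
  std::vector<double> p;
  p.push_back(0.2); p.push_back(0.5); p.push_back(0.3);
  AliasSampler sampler(p);
  Rand01 u01;
  std::vector<unsigned> counts(p.size(), 0);
  for (unsigned i = 0; i < 100000; ++i)
    ++counts[sampler.Draw(u01)];  // two uniform variates, O(1) work per draw
  for (unsigned i = 0; i < counts.size(); ++i)
    std::cout << i << ": " << counts[i] / 100000.0 << "\n";  // ~0.2, 0.5, 0.3
  return 0;
}

Whatever the number of outcomes, each draw costs the same constant work; only Init() is O(n).
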
diff --git a/utils/logval.h b/utils/logval.h
index 8a59d0b1..ec1f6acd 100644
--- a/utils/logval.h
+++ b/utils/logval.h
@@ -30,8 +30,6 @@ class LogVal {
   LogVal(init_minus_1) : s_(true),v_(0) { }
   LogVal(init_1) : s_(),v_(0) { }
   LogVal(init_0) : s_(),v_(LOGVAL_LOG0) { }
-  explicit LogVal(int x) : s_(x<0), v_(s_ ? std::log(-x) : std::log(x)) {}
-  explicit LogVal(unsigned x) : s_(0), v_(std::log(x)) { }
   LogVal(double lnx,bool sign) : s_(sign),v_(lnx) {}
   LogVal(double lnx,init_lnx) : s_(),v_(lnx) {}
   static Self exp(T lnx) { return Self(lnx,false); }
@@ -126,7 +124,7 @@ class LogVal {
   }
 
   Self operator-() const {
-    return Self(v_,-s_);
+    return Self(v_,!s_);
   }
 
   void negate() { s_ = !s_; }
@@ -193,6 +191,15 @@ T log(const LogVal<T>& o) {
   return o.v_;
 }
 
+template<class T>
+LogVal<T> abs(const LogVal<T>& o) {
+  if (o.s_) {
+    LogVal<T> res = o;
+    res.s_ = false;
+    return res;
+  } else { return o; }
+}
+
 template <class T>
 LogVal<T> pow(const LogVal<T>& b, const T& e) {
   return b.pow(e);
diff --git a/utils/unigram_pyp_lm.cc b/utils/unigram_pyp_lm.cc
new file mode 100644
index 00000000..510e8839
--- /dev/null
+++ b/utils/unigram_pyp_lm.cc
@@ -0,0 +1,168 @@
+#include <iostream>
+#include <tr1/memory>
+#include <queue>
+
+#include <boost/functional.hpp>
+#include <boost/program_options.hpp>
+#include <boost/program_options/variables_map.hpp>
+
+#include "corpus_tools.h"
+#include "m.h"
+#include "tdict.h"
+#include "sampler.h"
+#include "ccrp.h"
+
+// A not very memory-efficient implementation of a 1-gram LM based on PYPs
+// as described in Y.-W. Teh. (2006) A Hierarchical Bayesian Language Model
+// based on Pitman-Yor Processes. In Proc. ACL.
+
+using namespace std;
+using namespace tr1;
+namespace po = boost::program_options;
+
+boost::shared_ptr<MT19937> prng;
+
+void InitCommandLine(int argc, char** argv, po::variables_map* conf) {
+  po::options_description opts("Configuration options");
+  opts.add_options()
+        ("samples,n",po::value<unsigned>()->default_value(50),"Number of samples")
+        ("train,i",po::value<string>(),"Training data file")
+        ("test,T",po::value<string>(),"Test data file")
+        ("discount_prior_a,a",po::value<double>()->default_value(1.0), "discount ~ Beta(a,b): a=this")
+        ("discount_prior_b,b",po::value<double>()->default_value(1.0), "discount ~ Beta(a,b): b=this")
+        ("strength_prior_s,s",po::value<double>()->default_value(1.0), "strength ~ Gamma(s,r): s=this")
+        ("strength_prior_r,r",po::value<double>()->default_value(1.0), "strength ~ Gamma(s,r): r=this")
+        ("random_seed,S",po::value<uint32_t>(), "Random seed");
+  po::options_description clo("Command line options");
+  clo.add_options()
+        ("config", po::value<string>(), "Configuration file")
+        ("help", "Print this help message and exit");
+  po::options_description dconfig_options, dcmdline_options;
+  dconfig_options.add(opts);
+  dcmdline_options.add(opts).add(clo);
+
+  po::store(parse_command_line(argc, argv, dcmdline_options), *conf);
+  if (conf->count("config")) {
+    ifstream config((*conf)["config"].as<string>().c_str());
+    po::store(po::parse_config_file(config, dconfig_options), *conf);
+  }
+  po::notify(*conf);
+
+  if (conf->count("help") || (conf->count("train") == 0)) {
+    cerr << dcmdline_options << endl;
+    exit(1);
+  }
+}
+
+// uniform base distribution (0-gram model)
+struct UniformWordModel {
+  explicit UniformWordModel(unsigned vocab_size) : p0(1.0 / vocab_size), draws() {}
+  void increment() { ++draws; }
+  void decrement() { --draws; assert(draws >= 0); }
+  double prob(WordID) const { return p0; }  // all words have equal prob
+  double log_likelihood() const { return draws * log(p0); }
+  const double p0;
+  int draws;
+};
+
+// represents a unigram LM
+struct UnigramLM {
+  UnigramLM(unsigned vs, double da, double db, double ss, double sr) :
+      uniform_vocab(vs),
+      crp(da, db, ss, sr, 0.8, 1.0) {}
+  void increment(WordID w, MT19937* rng) {
+    const double backoff = uniform_vocab.prob(w);
+    if (crp.increment(w, backoff, rng))
+      uniform_vocab.increment();
+  }
+  void decrement(WordID w, MT19937* rng) {
+    if (crp.decrement(w, rng))
+      uniform_vocab.decrement();
+  }
+  double prob(WordID w) const {
+    const double backoff = uniform_vocab.prob(w);
+    return crp.prob(w, backoff);
+  }
+
+  double log_likelihood() const {
+    double llh = uniform_vocab.log_likelihood();
+    llh += crp.log_crp_prob();
+    return llh;
+  }
+
+  void resample_hyperparameters(MT19937* rng) {
+    crp.resample_hyperparameters(rng);
+  }
+
+  double discount_a, discount_b, strength_s, strength_r;
+  double d, strength;
+  UniformWordModel uniform_vocab;
+  CCRP<WordID> crp;
+};
+
+int main(int argc, char** argv) {
+  po::variables_map conf;
+
+  InitCommandLine(argc, argv, &conf);
+  const unsigned samples = conf["samples"].as<unsigned>();
+  if (conf.count("random_seed"))
+    prng.reset(new MT19937(conf["random_seed"].as<uint32_t>()));
+  else
+    prng.reset(new MT19937);
+  MT19937& rng = *prng;
+  vector<vector<WordID> > corpuse;
+  set<WordID> vocabe;
+  const WordID kEOS = TD::Convert("</s>");
+  cerr << "Reading corpus...\n";
+  CorpusTools::ReadFromFile(conf["train"].as<string>(), &corpuse, &vocabe);
+  cerr << "E-corpus size: " << corpuse.size() << " sentences\t (" << vocabe.size() << " word types)\n";
+  vector<vector<WordID> > test;
+  if (conf.count("test"))
+    CorpusTools::ReadFromFile(conf["test"].as<string>(), &test);
+  else
+    test = corpuse;
+  UnigramLM lm(vocabe.size(),
+               conf["discount_prior_a"].as<double>(),
+               conf["discount_prior_b"].as<double>(),
+               conf["strength_prior_s"].as<double>(),
+               conf["strength_prior_r"].as<double>());
+  for (int SS=0; SS < samples; ++SS) {
+    for (int ci = 0; ci < corpuse.size(); ++ci) {
+      const vector<WordID>& s = corpuse[ci];
+      for (int i = 0; i <= s.size(); ++i) {
+        WordID w = (i < s.size() ? s[i] : kEOS);
+        if (SS > 0) lm.decrement(w, &rng);
+        lm.increment(w, &rng);
+      }
+      if (SS > 0) lm.decrement(kEOS, &rng);
+      lm.increment(kEOS, &rng);
+    }
+    cerr << "LLH=" << lm.log_likelihood() << endl;
+    //if (SS % 10 == 9) lm.resample_hyperparameters(&rng);
+  }
+  double llh = 0;
+  unsigned cnt = 0;
+  unsigned oovs = 0;
+  for (int ci = 0; ci < test.size(); ++ci) {
+    const vector<WordID>& s = test[ci];
+    for (int i = 0; i <= s.size(); ++i) {
+      WordID w = (i < s.size() ? s[i] : kEOS);
+      double lp = log(lm.prob(w)) / log(2);
+      if (i < s.size() && vocabe.count(w) == 0) {
+        cerr << "**OOV ";
+        ++oovs;
+        lp = 0;
+      }
+      cerr << "p(" << TD::Convert(w) << ") = " << lp << endl;
+      llh -= lp;
+      cnt++;
+    }
+  }
+  cerr << "  Log_10 prob: " << (-llh * log(2) / log(10)) << endl;
+  cerr << "        Count: " << cnt << endl;
+  cerr << "         OOVs: " << oovs << endl;
+  cerr << "Cross-entropy: " << (llh / cnt) << endl;
+  cerr << "   Perplexity: " << pow(2, llh / cnt) << endl;
+  return 0;
+}
+
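
For intuition about what crp.prob(w, backoff) returns above, the standard Pitman-Yor predictive rule from Teh (2006) can be written down directly. The sketch below is that textbook formula only, evaluated on hypothetical counts; cdec's ccrp.h is the real implementation, which also tracks seating arrangements and resamples the discount and strength under the Beta/Gamma priors configured on the command line.

// Standard PYP/CRP predictive probability (Teh 2006), self-contained sketch.
// All counts are hypothetical inputs; ccrp.h maintains them internally.
#include <iostream>

double pyp_prob(double c_w,    // customers eating dish (word) w
                double t_w,    // tables serving w
                double n,      // total customers
                double T,      // total tables
                double d,      // discount, 0 <= d < 1
                double theta,  // strength, theta > -d
                double p0) {   // base-distribution probability of w
  return (c_w - d * t_w + (theta + d * T) * p0) / (theta + n);
}

int main() {
  const double p0 = 1.0 / 50000;  // uniform base, as in UniformWordModel
  // Unseen word: only the base-distribution term contributes.
  std::cout << pyp_prob(0, 0, 1000, 200, 0.8, 1.0, p0) << "\n";
  // Frequent word: dominated by its discounted customer count.
  std::cout << pyp_prob(50, 3, 1000, 200, 0.8, 1.0, p0) << "\n";
  return 0;
}

When c_w = t_w = 0 the first term vanishes and all mass comes from p0, which mirrors how UnigramLM::prob backs off to UniformWordModel; the 0.8 and 1.0 passed to the CCRP constructor appear to be initial discount and strength values, which this sketch reuses.
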
diff --git a/utils/weights.cc b/utils/weights.cc
index ac407dfb..39c18474 100644
--- a/utils/weights.cc
+++ b/utils/weights.cc
@@ -144,8 +144,10 @@ void Weights::ShowLargestFeatures(const vector<weight_t>& w) {
   vector<int> fnums(w.size());
   for (int i = 0; i < w.size(); ++i)
     fnums[i] = i;
+  int nf = FD::NumFeats();
+  if (nf > 10) nf = 10;
   vector<int>::iterator mid = fnums.begin();
-  mid += (w.size() > 10 ? 10 : w.size());
+  mid += nf;
   partial_sort(fnums.begin(), mid, fnums.end(), FComp(w));
   cerr << "TOP FEATURES:";
   for (vector<int>::iterator i = fnums.begin(); i != mid; ++i) {
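
The weights.cc change caps the report at min(FD::NumFeats(), 10) entries and leans on partial_sort, which orders only the first nf positions in O(n log nf). A standalone sketch of the same top-k idiom (FCompSketch is a hypothetical stand-in for cdec's FComp; the real comparator may rank differently):

// Top-k selection with std::partial_sort, as in ShowLargestFeatures.
#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

struct FCompSketch {  // rank feature indices by |weight|, descending
  const std::vector<double>& w_;
  explicit FCompSketch(const std::vector<double>& w) : w_(w) {}
  bool operator()(int a, int b) const { return std::fabs(w_[a]) > std::fabs(w_[b]); }
};

int main() {
  const double raw[] = {0.1, -2.5, 0.7, 3.2, -0.4, 1.1};
  std::vector<double> w(raw, raw + 6);
  std::vector<int> fnums(w.size());
  for (int i = 0; i < (int)w.size(); ++i) fnums[i] = i;
  int nf = (int)w.size();
  if (nf > 3) nf = 3;  // cap at the top 3, as the patch caps at 10
  std::vector<int>::iterator mid = fnums.begin() + nf;
  std::partial_sort(fnums.begin(), mid, fnums.end(), FCompSketch(w));
  for (std::vector<int>::iterator i = fnums.begin(); i != mid; ++i)
    std::cout << *i << "=" << w[*i] << "\n";  // prints indices 3, 1, 5
  return 0;
}

Everything past mid is left in unspecified order, which is why the patched code only prints entries up to mid.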