From ac40b555b98a2ea295d48e95263086b52ed3b74b Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Sun, 24 Jun 2012 22:30:50 -0400
Subject: minimum risk training, not completely ready for primetime

---
 minrisk/minrisk_optimize.cc | 141 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 141 insertions(+)
 create mode 100644 minrisk/minrisk_optimize.cc
(limited to 'minrisk/minrisk_optimize.cc')

diff --git a/minrisk/minrisk_optimize.cc b/minrisk/minrisk_optimize.cc
new file mode 100644
index 00000000..5096acc1
--- /dev/null
+++ b/minrisk/minrisk_optimize.cc
@@ -0,0 +1,141 @@
+#include <sstream>
+#include <iostream>
+#include <vector>
+#include <limits>
+
+#include <boost/program_options.hpp>
+#include <boost/program_options/variables_map.hpp>
+
+#include "liblbfgs/lbfgs++.h"
+#include "filelib.h"
+#include "stringlib.h"
+#include "weights.h"
+#include "hg_io.h"
+#include "kbest.h"
+#include "viterbi.h"
+#include "ns.h"
+#include "ns_docscorer.h"
+#include "candidate_set.h"
+#include "risk.h"
+
+using namespace std;
+namespace po = boost::program_options;
+
+void InitCommandLine(int argc, char** argv, po::variables_map* conf) {
+  po::options_description opts("Configuration options");
+  opts.add_options()
+        ("reference,r",po::value<vector<string> >(), "[REQD] Reference translation (tokenized text)")
+        ("weights,w",po::value<string>(), "[REQD] Weights files from current iterations")
+        ("input,i",po::value<string>()->default_value("-"), "Input file to map (- is STDIN)")
+        ("evaluation_metric,m",po::value<string>()->default_value("IBM_BLEU"), "Evaluation metric (ibm_bleu, koehn_bleu, nist_bleu, ter, meteor, etc.)")
+        ("kbest_repository,R",po::value<string>(), "Accumulate k-best lists from previous iterations (parameter is path to repository)")
+        ("kbest_size,k",po::value<unsigned>()->default_value(500u), "Top k-hypotheses to extract")
+        ("help,h", "Help");
+  po::options_description dcmdline_options;
+  dcmdline_options.add(opts);
+  po::store(parse_command_line(argc, argv, dcmdline_options), *conf);
+  bool flag = false;
+  if (!conf->count("reference")) {
+    cerr << "Please specify one or more references using -r <REF.TXT>\n";
+    flag = true;
+  }
+  if (!conf->count("weights")) {
+    cerr << "Please specify weights using -w <WEIGHTS.TXT>\n";
+    flag = true;
+  }
+  if (flag || conf->count("help")) {
+    cerr << dcmdline_options << endl;
+    exit(1);
+  }
+}
+
+EvaluationMetric* metric = NULL;
+
+struct RiskObjective {
+  explicit RiskObjective(const vector<training::CandidateSet>& tr) : training(tr) {}
+  double operator()(const vector<double>& x, double* g) const {
+    fill(g, g + x.size(), 0.0);
+    double obj = 0;
+    for (unsigned i = 0; i < training.size(); ++i) {
+      training::CandidateSetRisk risk(training[i], *metric);
+      SparseVector<double> tg;
+      double r = risk(x, &tg);
+      obj += r;
+      for (SparseVector<double>::iterator it = tg.begin(); it != tg.end(); ++it)
+        g[it->first] += it->second;
+    }
+    cerr << (1-(obj / training.size())) << endl;
+    return obj;
+  }
+  const vector<training::CandidateSet>& training;
+};
+
+double LearnParameters(const vector<training::CandidateSet>& training,
+                       const double C1,
+                       const unsigned memory_buffers,
+                       vector<weight_t>* px) {
+  RiskObjective obj(training);
+  LBFGS<RiskObjective> lbfgs(px, obj, memory_buffers, C1);
+  lbfgs.MinimizeFunction();
+  return 0;
+}
+
+// runs lines 4--15 of rampion algorithm
+int main(int argc, char** argv) {
+  po::variables_map conf;
+  InitCommandLine(argc, argv, &conf);
+  const string evaluation_metric = conf["evaluation_metric"].as<string>();
+
+  metric = EvaluationMetric::Instance(evaluation_metric);
+  DocumentScorer ds(metric, conf["reference"].as<vector<string> >());
+  cerr << "Loaded " << ds.size() << " references for scoring with " << evaluation_metric << endl;
+  double goodsign = -1;
+  double badsign = -goodsign;
+
+  Hypergraph hg;
+  string last_file;
+  ReadFile in_read(conf["input"].as<string>());
+  string kbest_repo;
+  if (conf.count("kbest_repository")) {
+    kbest_repo = conf["kbest_repository"].as<string>();
+    MkDirP(kbest_repo);
+  }
+  istream &in=*in_read.stream();
+  const unsigned kbest_size = conf["kbest_size"].as<unsigned>();
+  vector<weight_t> weights;
+  const string weightsf = conf["weights"].as<string>();
+  Weights::InitFromFile(weightsf, &weights);
+  string line, file;
+  vector<training::CandidateSet> kis;
+  cerr << "Loading hypergraphs...\n";
+  while(getline(in, line)) {
+    istringstream is(line);
+    int sent_id;
+    kis.resize(kis.size() + 1);
+    training::CandidateSet& curkbest = kis.back();
+    string kbest_file;
+    if (kbest_repo.size()) {
+      ostringstream os;
+      os << kbest_repo << "/kbest." << sent_id << ".txt.gz";
+      kbest_file = os.str();
+      if (FileExists(kbest_file))
+        curkbest.ReadFromFile(kbest_file);
+    }
+    is >> file >> sent_id;
+    ReadFile rf(file);
+    if (kis.size() % 5 == 0) { cerr << '.'; }
+    if (kis.size() % 200 == 0) { cerr << " [" << kis.size() << "]\n"; }
+    HypergraphIO::ReadFromJSON(rf.stream(), &hg);
+    hg.Reweight(weights);
+    curkbest.AddKBestCandidates(hg, kbest_size, ds[sent_id]);
+    if (kbest_file.size())
+      curkbest.WriteToFile(kbest_file);
+  }
+  cerr << "\nHypergraphs loaded.\n";
+  weights.resize(FD::NumFeats());
+
+  LearnParameters(kis, 0.0, 100, &weights);
+  Weights::WriteToFile("-", weights);
+  return 0;
+}
+
--
cgit v1.2.3

From c84ef9590d11819b7f8441a53b1699a912d949e1 Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Tue, 26 Jun 2012 13:03:46 -0400
Subject: minrisk impl

---
 minrisk/minrisk.pl          | 2 +-
 minrisk/minrisk_optimize.cc | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)
(limited to 'minrisk/minrisk_optimize.cc')

diff --git a/minrisk/minrisk.pl b/minrisk/minrisk.pl
index 99893a66..d05b9595 100755
--- a/minrisk/minrisk.pl
+++ b/minrisk/minrisk.pl
@@ -133,7 +133,7 @@ if ($metric =~ /^ter$|^aer$/i) {
 my $refs_comma_sep = get_comma_sep_refs('r',$refFiles);
 
 unless ($dir){
-	$dir = "rampion";
+	$dir = "minrisk";
 }
 unless ($dir =~ /^\//){  # convert relative path to absolute path
 	my $basedir = check_output("pwd");
diff --git a/minrisk/minrisk_optimize.cc b/minrisk/minrisk_optimize.cc
index 5096acc1..6e651994 100644
--- a/minrisk/minrisk_optimize.cc
+++ b/minrisk/minrisk_optimize.cc
@@ -105,6 +105,13 @@ int main(int argc, char** argv) {
   vector<weight_t> weights;
   const string weightsf = conf["weights"].as<string>();
   Weights::InitFromFile(weightsf, &weights);
+  double t = 0;
+  for (unsigned i = 0; i < weights.size(); ++i)
+    t += weights[i] * weights[i];
+  if (t > 0) {
+    for (unsigned i = 0; i < weights.size(); ++i)
+      weights[i] /= sqrt(t);
+  }
   string line, file;
   vector<training::CandidateSet> kis;
   cerr << "Loading hypergraphs...\n";
--
cgit v1.2.3

From 3044d6d1c6d428e8d06c255e3a2d739bcd187679 Mon Sep 17 00:00:00 2001
From: Chris Dyer
Date: Fri, 29 Jun 2012 18:45:26 -0700
Subject: add option for entropy optimization

---
 minrisk/minrisk_optimize.cc | 67 +++++++++++++++++++++++++++++++++++++++------
 training/Makefile.am        |  1 +
 training/entropy.cc         | 41 +++++++++++++++++++++++++++
 training/entropy.h          | 22 +++++++++++++++
 utils/fdict.h               |  2 ++
 5 files changed, 124 insertions(+), 9 deletions(-)
 create mode 100644 training/entropy.cc
 create mode 100644 training/entropy.h
(limited to 'minrisk/minrisk_optimize.cc')

diff --git a/minrisk/minrisk_optimize.cc b/minrisk/minrisk_optimize.cc
index 6e651994..da8b5260 100644
--- a/minrisk/minrisk_optimize.cc
+++ b/minrisk/minrisk_optimize.cc
@@ -17,6 +17,7 @@
 #include "ns_docscorer.h"
 #include "candidate_set.h"
 #include "risk.h"
+#include "entropy.h"
 
 using namespace std;
 namespace po = boost::program_options;
@@ -28,6 +29,9 @@ void InitCommandLine(int argc, char** argv, po::variables_map* conf) {
         ("weights,w",po::value<string>(), "[REQD] Weights files from current iterations")
         ("input,i",po::value<string>()->default_value("-"), "Input file to map (- is STDIN)")
         ("evaluation_metric,m",po::value<string>()->default_value("IBM_BLEU"), "Evaluation metric (ibm_bleu, koehn_bleu, nist_bleu, ter, meteor, etc.)")
+        ("temperature,T",po::value<double>()->default_value(0.0), "Temperature parameter for objective (>0 increases the entropy)")
+        ("l1_strength,C",po::value<double>()->default_value(0.0), "L1 regularization strength")
+        ("memory_buffers,M",po::value<unsigned>()->default_value(20), "Memory buffers used in LBFGS")
         ("kbest_repository,R",po::value<string>(), "Accumulate k-best lists from previous iterations (parameter is path to repository)")
         ("kbest_size,k",po::value<unsigned>()->default_value(500u), "Top k-hypotheses to extract")
         ("help,h", "Help");
@@ -52,36 +56,80 @@ void InitCommandLine(int argc, char** argv, po::variables_map* conf) {
 EvaluationMetric* metric = NULL;
 
 struct RiskObjective {
-  explicit RiskObjective(const vector<training::CandidateSet>& tr) : training(tr) {}
+  explicit RiskObjective(const vector<training::CandidateSet>& tr, const double temp) : training(tr), T(temp) {}
   double operator()(const vector<double>& x, double* g) const {
     fill(g, g + x.size(), 0.0);
     double obj = 0;
+    double h = 0;
     for (unsigned i = 0; i < training.size(); ++i) {
       training::CandidateSetRisk risk(training[i], *metric);
-      SparseVector<double> tg;
+      training::CandidateSetEntropy entropy(training[i]);
+      SparseVector<double> tg, hg;
       double r = risk(x, &tg);
+      double hh = entropy(x, &hg);
+      h += hh;
       obj += r;
       for (SparseVector<double>::iterator it = tg.begin(); it != tg.end(); ++it)
         g[it->first] += it->second;
+      if (T) {
+        for (SparseVector<double>::iterator it = hg.begin(); it != hg.end(); ++it)
+          g[it->first] += T * it->second;
+      }
     }
-    cerr << (1-(obj / training.size())) << endl;
-    return obj;
+    cerr << (1-(obj / training.size())) << " H=" << h << endl;
+    return obj - T * h;
   }
   const vector<training::CandidateSet>& training;
+  const double T; // temperature for entropy regularization
};
 
 double LearnParameters(const vector<training::CandidateSet>& training,
+                       const double temp, // > 0 increases the entropy, < 0 decreases the entropy
                        const double C1,
                        const unsigned memory_buffers,
                        vector<weight_t>* px) {
-  RiskObjective obj(training);
+  RiskObjective obj(training, temp);
   LBFGS<RiskObjective> lbfgs(px, obj, memory_buffers, C1);
   lbfgs.MinimizeFunction();
   return 0;
 }
 
-// runs lines 4--15 of rampion algorithm
+#if 0
+struct FooLoss {
+  double operator()(const vector<double>& x, double* g) const {
+    fill(g, g + x.size(), 0.0);
+    training::CandidateSet cs;
+    training::CandidateSetEntropy cse(cs);
+    cs.cs.resize(3);
+    cs.cs[0].fmap.set_value(FD::Convert("F1"), -1.0);
+    cs.cs[1].fmap.set_value(FD::Convert("F2"), 1.0);
+    cs.cs[2].fmap.set_value(FD::Convert("F1"), 2.0);
+    cs.cs[2].fmap.set_value(FD::Convert("F2"), 0.5);
+    SparseVector<double> xx;
+    double h = cse(x, &xx);
+    cerr << cse(x, &xx) << endl; cerr << "G: " << xx << endl;
+    for (SparseVector<double>::iterator i = xx.begin(); i != xx.end(); ++i)
+      g[i->first] += i->second;
+    return -h;
+  }
+};
+#endif
+
 int main(int argc, char** argv) {
+#if 0
+  training::CandidateSet cs;
+  training::CandidateSetEntropy cse(cs);
+  cs.cs.resize(3);
+  cs.cs[0].fmap.set_value(FD::Convert("F1"), -1.0);
+  cs.cs[1].fmap.set_value(FD::Convert("F2"), 1.0);
+  cs.cs[2].fmap.set_value(FD::Convert("F1"), 2.0);
+  cs.cs[2].fmap.set_value(FD::Convert("F2"), 0.5);
+  FooLoss foo;
+  vector<double> ww(FD::NumFeats()); ww[FD::Convert("F1")] = 1.0;
+  LBFGS<FooLoss> lbfgs(&ww, foo, 100, 0.0);
+  lbfgs.MinimizeFunction();
+  return 1;
+#endif
   po::variables_map conf;
   InitCommandLine(argc, argv, &conf);
   const string evaluation_metric = conf["evaluation_metric"].as<string>();
@@ -89,8 +137,6 @@ int main(int argc, char** argv) {
   metric = EvaluationMetric::Instance(evaluation_metric);
   DocumentScorer ds(metric, conf["reference"].as<vector<string> >());
   cerr << "Loaded " << ds.size() << " references for scoring with " << evaluation_metric << endl;
-  double goodsign = -1;
-  double badsign = -goodsign;
 
   Hypergraph hg;
   string last_file;
@@ -141,7 +187,10 @@ int main(int argc, char** argv) {
   cerr << "\nHypergraphs loaded.\n";
   weights.resize(FD::NumFeats());
 
-  LearnParameters(kis, 0.0, 100, &weights);
+  double c1 = conf["l1_strength"].as<double>();
+  double temp = conf["temperature"].as<double>();
+  unsigned m = conf["memory_buffers"].as<unsigned>();
+  LearnParameters(kis, temp, c1, m, &weights);
   Weights::WriteToFile("-", weights);
   return 0;
 }
diff --git a/training/Makefile.am b/training/Makefile.am
index 68ebfab4..4cef0d5b 100644
--- a/training/Makefile.am
+++ b/training/Makefile.am
@@ -26,6 +26,7 @@ TESTS = lbfgs_test optimize_test
 noinst_LIBRARIES = libtraining.a
 libtraining_a_SOURCES = \
   candidate_set.cc \
+  entropy.cc \
   optimize.cc \
   online_optimizer.cc \
   risk.cc
diff --git a/training/entropy.cc b/training/entropy.cc
new file mode 100644
index 00000000..4fdbe2be
--- /dev/null
+++ b/training/entropy.cc
@@ -0,0 +1,41 @@
+#include "entropy.h"
+
+#include "prob.h"
+#include "candidate_set.h"
+
+using namespace std;
+
+namespace training {
+
+// see Mann and McCallum "Efficient Computation of Entropy Gradient ..." for
+// a mostly clear derivation of:
+//   g = E[ F(x,y) * log p(y|x) ] + H(y | x) * E[ F(x,y) ]
+double CandidateSetEntropy::operator()(const vector<double>& params,
+                                       SparseVector<double>* g) const {
+  prob_t z;
+  vector<double> dps(cands_.size());
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    dps[i] = cands_[i].fmap.dot(params);
+    const prob_t u(dps[i], init_lnx());
+    z += u;
+  }
+  const double log_z = log(z);
+
+  SparseVector<double> exp_feats;
+  double entropy = 0;
+  for (unsigned i = 0; i < cands_.size(); ++i) {
+    const double log_prob = cands_[i].fmap.dot(params) - log_z;
+    const double prob = exp(log_prob);
+    const double e_logprob = prob * log_prob;
+    entropy -= e_logprob;
+    if (g) {
+      (*g) += cands_[i].fmap * e_logprob;
+      exp_feats += cands_[i].fmap * prob;
+    }
+  }
+  if (g) (*g) += exp_feats * entropy;
+  return entropy;
+}
+
+}
diff --git a/training/entropy.h b/training/entropy.h
new file mode 100644
index 00000000..796589ca
--- /dev/null
+++ b/training/entropy.h
@@ -0,0 +1,22 @@
+#ifndef _CSENTROPY_H_
+#define _CSENTROPY_H_
+
+#include <vector>
+#include "sparse_vector.h"
+
+namespace training {
+  class CandidateSet;
+
+  class CandidateSetEntropy {
+   public:
+    explicit CandidateSetEntropy(const CandidateSet& cs) : cands_(cs) {}
+    // compute the entropy (negative expected log likelihood) of a CandidateSet
+    // (optional) g receives the gradient of the entropy with respect to params
+    double operator()(const std::vector<double>& params,
+                      SparseVector<double>* g = NULL) const;
+   private:
+    const CandidateSet& cands_;
+  };
+};
+
+#endif
diff --git a/utils/fdict.h b/utils/fdict.h
index 71547d2e..eb853fb2 100644
--- a/utils/fdict.h
+++ b/utils/fdict.h
@@ -33,6 +33,8 @@ struct FD {
     assert(dict_.max() == 0); // dictionary must not have
                               // been added to
     hash_ = new PerfectHashFunction(cmph_file);
+#else
+    (void) cmph_file;
 #endif
   }
   static inline int NumFeats() {
--
cgit v1.2.3
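A sketch of the objective that the final version of RiskObjective minimizes, for readers cross-checking the sign conventions of the entropy term added in the last commit. The notation (theta for the weight vector, F for the feature map, R for per-sentence risk, H for entropy) is editorial shorthand and does not name identifiers in the source:

    % Log-linear distribution over each sentence's k-best candidate set:
    %   p_\theta(y \mid x) = \frac{\exp(\theta^\top F(x,y))}{\sum_{y'} \exp(\theta^\top F(x,y'))}
    %
    % CandidateSetEntropy returns
    %   H_i(\theta) = -\sum_y p_\theta(y \mid x_i) \log p_\theta(y \mid x_i)
    % and accumulates into g the quantity from its Mann & McCallum comment,
    %   g_i = E_{p_\theta}[\, F \log p_\theta \,] + H_i \cdot E_{p_\theta}[\, F \,],
    % which equals \nabla_\theta(-H_i), the gradient of the negative entropy.
    %
    % RiskObjective therefore minimizes, with temperature T,
    %   L(\theta) = \sum_i R_i(\theta) - T \sum_i H_i(\theta),
    % whose gradient is \sum_i \nabla_\theta R_i + T \, g_i.

So the loop's "g[it->first] += T * it->second;" together with "return obj - T * h;" is internally consistent: both the returned value and the accumulated gradient correspond to L, and T > 0 rewards higher-entropy (less peaked) models, matching the --temperature help string.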