From d7d59c4bb81262f1dfece384ec68fa2c25096843 Mon Sep 17 00:00:00 2001
From: graehl
Date: Fri, 16 Jul 2010 01:56:34 +0000
Subject: oracle directions

git-svn-id: https://ws10smt.googlecode.com/svn/trunk@276 ec762483-ff6d-05da-a07a-a48fb63a330f
---
 decoder/cdec.cc                       | 29 +++----------
 decoder/ff_fsa.h                      |  5 +++
 decoder/logval.h                      |  6 +++
 decoder/oracle_bleu.h                 | 79 +++++++++++++++++++++++++++++------
 decoder/sparse_vector.h               | 39 +++++++++++++++--
 decoder/value_array.h                 | 12 +++---
 decoder/viterbi.cc                    |  2 +-
 decoder/viterbi.h                     |  2 +-
 vest/mr_vest_generate_mapper_input.cc | 75 +++++++++++++++++++++------------
 9 files changed, 174 insertions(+), 75 deletions(-)

diff --git a/decoder/cdec.cc b/decoder/cdec.cc
index e616f1bb..75c907b1 100644
--- a/decoder/cdec.cc
+++ b/decoder/cdec.cc
@@ -308,7 +308,7 @@ bool prelm_weights_string(po::variables_map const& conf,string &s)
 }


-void forest_stats(Hypergraph &forest,string name,bool show_tree,bool show_features,FeatureWeights *weights=0) {
+void forest_stats(Hypergraph &forest,string name,bool show_tree,bool show_features,WeightVector *weights=0) {
   cerr << viterbi_stats(forest,name,true,show_tree);
   if (show_features) {
     cerr << name<<" features: ";
@@ -601,33 +601,14 @@ int main(int argc, char** argv) {
     vector trans;
     ViterbiESentence(forest, &trans);

+    /*Oracle Rescoring*/
     if(get_oracle_forest) {
-      Timer t("Forest Oracle rescoring:");
-
-      oracle.DumpKBest(conf,"model",sent_id, forest, 10, true);
-
-      Translation best(forest);
-      {
-        Hypergraph oracle_forest;
-        oracle.Rescore(smeta,forest,&oracle_forest,feature_weights,1.0);
-        forest.swap(oracle_forest);
-      }
-      Translation oracle_trans(forest);
-
+      Oracles o=oracle.ComputeOracles(smeta,forest,feature_weights,&cerr,10,conf["forest_output"].as());
       cerr << " +Oracle BLEU forest (nodes/edges): " << forest.nodes_.size() << '/' << forest.edges_.size() << endl;
       cerr << " +Oracle BLEU (paths): " << forest.NumberOfPaths() << endl;
-      oracle_trans.Print(cerr," +Oracle BLEU");
-      //compute kbest for oracle
-      oracle.DumpKBest(conf,"oracle",sent_id, forest, 10, true);
-
-      //reweight the model with -1 for the BLEU feature to compute k-best list for negative examples
-      oracle.ReweightBleu(&forest,-1.0);
-      Translation neg_trans(forest);
-      neg_trans.Print(cerr," -Oracle BLEU");
-      //compute kbest for negative
-      oracle.DumpKBest(conf,"negative",sent_id, forest, 10, true);
-
+      o.hope.Print(cerr," +Oracle BLEU");
+      o.fear.Print(cerr," -Oracle BLEU");
       //Add 1-best translation (trans) to pseudo-doc vectors
       oracle.IncludeLastScore(&cerr);
     }

diff --git a/decoder/ff_fsa.h b/decoder/ff_fsa.h
index cd56f1a5..2ffd6ef8 100755
--- a/decoder/ff_fsa.h
+++ b/decoder/ff_fsa.h
@@ -1,9 +1,14 @@
 #ifndef FF_FSA_H
 #define FF_FSA_H

+#include //C99
 #include
 #include "ff.h"
 #include "sparse_vector.h"
+#include "value_array.h"
+
+typedef ValueArray Bytes;
+
 /* */

diff --git a/decoder/logval.h b/decoder/logval.h
index 9aaba557..c8c342a3 100644
--- a/decoder/logval.h
+++ b/decoder/logval.h
@@ -58,6 +58,12 @@ class LogVal {
     return *this += b;
   }

+  // LogVal(fabs(log(x)),x.s_)
+  friend LogVal abslog(LogVal x) {
+    if (x.v_<0) x.v_=-x.v_;
+    return x;
+  }
+
   LogVal& poweq(const T& power) {
 #if LOGVAL_CHECK_NEG
     if (s_) {

diff --git a/decoder/oracle_bleu.h b/decoder/oracle_bleu.h
index 5fef53fd..550f438f 100755
--- a/decoder/oracle_bleu.h
+++ b/decoder/oracle_bleu.h
@@ -37,7 +37,31 @@ struct Translation {
   out<add_options()
     ("references,R", value(), "Translation reference files")
     ("oracle_loss", value(), "IBM_BLEU_3 (default), IBM_BLEU etc")
+    ("bleu_weight", value()->default_value(1.), "weight to give the hope/fear loss function vs. model score")
     ;
 }
 int order;
@@ -66,17 +91,20 @@ struct OracleBleu {
   double doc_src_length;
   void set_oracle_doc_size(int size) {
     oracle_doc_size=size;
-    scale_oracle= 1-1./oracle_doc_size;\
+    scale_oracle= 1-1./oracle_doc_size;
     doc_src_length=0;
   }
   OracleBleu(int doc_size=10) {
     set_oracle_doc_size(doc_size);
   }

-  boost::shared_ptr doc_score,sentscore; // made from factory, so we delete them
+  typedef boost::shared_ptr ScoreP;
+  ScoreP doc_score,sentscore; // made from factory, so we delete them
+  double bleu_weight;

   void UseConf(boost::program_options::variables_map const& conf) {
     using namespace std;
+    bleu_weight=conf["bleu_weight"].as();
     set_loss(conf["oracle_loss"].as());
     set_refs(conf["references"].as());
   }
@@ -108,21 +136,48 @@ struct OracleBleu {
     ViterbiFSentence(forest,&srcsent);
     SentenceMetadata sm(sent_id,Lattice()); //TODO: make reference from refs?
     sm.SetSourceLength(srcsent.size());
+    sm.SetScore(doc_score.get());
+    sm.SetDocScorer(&ds);
+    sm.SetDocLen(doc_src_length);
     return sm;
   }

-  void Rescore(SentenceMetadata & smeta,Hypergraph const& forest,Hypergraph *dest_forest,WeightVector const& feature_weights,double bleu_weight=1.0) {
-    Translation model_trans(forest);
-    sentscore.reset(ds[smeta.GetSentenceID()]->ScoreCandidate(model_trans.sentence));
+  Oracles ComputeOracles(SentenceMetadata & smeta,Hypergraph const& forest,WeightVector const& feature_weights,std::ostream *log=0,unsigned kbest=0,std::string const& forest_output="") {
+    Oracles r;
+    int sent_id=smeta.GetSentenceID();
+    r.model=Translation(forest);
+
+    if (kbest) DumpKBest("model",sent_id, forest, kbest, true, forest_output);
+    {
+      Timer t("Forest Oracle rescoring:");
+      Hypergraph oracle_forest;
+      Rescore(smeta,forest,&oracle_forest,feature_weights,bleu_weight,log);
+      forest.swap(oracle_forest);
+    }
+    r.hope=Translation(forest);
+    if (kbest) DumpKBest("oracle",sent_id, forest, kbest, true, forest_output);
+    ReweightBleu(&forest,-bleu_weight);
+    r.fear=Translation(forest);
+    if (kbest) DumpKBest("negative",sent_id, forest, kbest, true, forest_output);
+    return r;
+  }
+
+  ScoreP Score(Sentence const& sentence,int sent_id) {
+    return ds[sent_id]->ScoreCandidate(sentence);
+  }
+  ScoreP Score(Hypergraph const& forest,int sent_id) {
+    return Score(Translation(forest).sentence,sent_id);
+  }
+
+  void Rescore(SentenceMetadata & smeta,Hypergraph const& forest,Hypergraph *dest_forest,WeightVector const& feature_weights,double bleu_weight=1.0,std::ostream *log=&std::cerr) {
+    // the sentence bleu stats will get added to doc only if you call IncludeLastScore
+    sentscore=Score(forest,smeta.GetSentenceID());
     if (!doc_score) { doc_score.reset(sentscore->GetOne()); }
     tmp_src_length = smeta.GetSourceLength(); //TODO: where does this come from?
-    smeta.SetScore(doc_score.get());
-    smeta.SetDocLen(doc_src_length);
-    smeta.SetDocScorer(&ds);
     using namespace std;
-    ModelSet oracle_models(FeatureWeights(bleu_weight,1),vector(1,pff.get()));
+    ModelSet oracle_models(WeightVector(bleu_weight,1),vector(1,pff.get()));
     const IntersectionConfiguration inter_conf_oracle(0, 0);
-    cerr << "Going to call Apply Model " << endl;
+    if (log) *log << "Going to call Apply Model " << endl;
     ApplyModelSet(forest,
                   smeta,
                   oracle_models,
@@ -190,10 +245,10 @@ struct OracleBleu {
     }
   }

-  void DumpKBest(boost::program_options::variables_map const& conf,std::string const& suffix,const int sent_id, const Hypergraph& forest, const int k, const bool unique)
+void DumpKBest(boost::program_options::variables_map const& conf,std::string const& suffix,const int sent_id, const Hypergraph& forest, const int k, const bool unique, std::string const& forest_output)
   {
     std::ostringstream kbest_string_stream;
-    kbest_string_stream << conf["forest_output"].as() << "/kbest_"<

diff --git a/decoder/sparse_vector.h b/decoder/sparse_vector.h
 const& v) {
     typename MapType::iterator p=values_.begin();
-    for (unsigned i=0;i
+class SparseVectorList {
+  typedef std::vector ListType;
+  typedef typename ListType::value_type pair_type;
+  typedef typename ListType::const_iterator const_iterator;
+  SparseVectorList() {  }
+  explicit SparseVectorList(std::vector const& v) {
+    const T z=T(0);
+    for (unsigned i=0;i
 FeatureVector;
-typedef std::vector FeatureWeights;
-typedef FeatureWeights WeightVector;
+typedef SparseVector WeightVector;

 template
 SparseVector operator+(const SparseVector& a, const SparseVector& b) {

diff --git a/decoder/value_array.h b/decoder/value_array.h
index bfdd1155..7401938a 100755
--- a/decoder/value_array.h
+++ b/decoder/value_array.h
@@ -1,12 +1,12 @@
 #ifndef VALUE_ARRAY_H
 #define VALUE_ARRAY_H

-# include
-# include
-# include
-# include
-# include
-# include
+#include
+#include
+#include
+#include
+#include
+#include
 #ifdef USE_BOOST_SERIALIZE
 # include
 # include

diff --git a/decoder/viterbi.cc b/decoder/viterbi.cc
index f11b77ec..7719de32 100644
--- a/decoder/viterbi.cc
+++ b/decoder/viterbi.cc
@@ -116,7 +116,7 @@ inline bool close_enough(double a,double b,double epsilon)
   return diff<=epsilon*fabs(a) || diff<=epsilon*fabs(b);
 }

-FeatureVector ViterbiFeatures(Hypergraph const& hg,FeatureWeights const* weights,bool fatal_dotprod_disagreement) {
+FeatureVector ViterbiFeatures(Hypergraph const& hg,WeightVector const* weights,bool fatal_dotprod_disagreement) {
   FeatureVector r;
   const prob_t p = Viterbi(hg, &r);
   if (weights) {

diff --git a/decoder/viterbi.h b/decoder/viterbi.h
index 4697590b..388bff3c 100644
--- a/decoder/viterbi.h
+++ b/decoder/viterbi.h
@@ -205,6 +205,6 @@ int ViterbiELength(const Hypergraph& hg);
 int ViterbiPathLength(const Hypergraph& hg);

 /// if weights supplied, assert viterbi prob = features.dot(*weights) (exception if fatal, cerr warn if not).  return features (sum over all edges in viterbi derivation)
-FeatureVector ViterbiFeatures(Hypergraph const& hg,FeatureWeights const* weights=0,bool fatal_dotprod_disagreement=false);
+FeatureVector ViterbiFeatures(Hypergraph const& hg,WeightVector const* weights=0,bool fatal_dotprod_disagreement=false);

 #endif

diff --git a/vest/mr_vest_generate_mapper_input.cc b/vest/mr_vest_generate_mapper_input.cc
index e9a5650b..677c0497 100644
--- a/vest/mr_vest_generate_mapper_input.cc
+++ b/vest/mr_vest_generate_mapper_input.cc
@@ -84,16 +84,16 @@ struct oracle_directions {
     OracleBleu::AddOptions(&opts);
     opts.add_options()
       ("dev_set_size,s",po::value(&dev_set_size),"[REQD] Development set size (# of parallel sentences)")
-      ("forest_repository,r",po::value(),"[REQD] Path to forest repository")
-      ("weights,w",po::value(),"[REQD] Current feature weights file")
+      ("forest_repository,r",po::value(&forest_repository),"[REQD] Path to forest repository")
+      ("weights,w",po::value(&weights_file),"[REQD] Current feature weights file")
       ("optimize_feature,o",po::value >(), "Feature to optimize (if none specified, all weights listed in the weights file will be optimized)")
-      ("random_directions,d",po::value()->default_value(20),"Number of random directions to run the line optimizer in")
+      ("random_directions,d",po::value(&random_directions)->default_value(10),"Number of random directions to run the line optimizer in")
       ("no_primary,n","don't use the primary (orthogonal each feature alone) directions")
-      ("oracle_directions,O",po::value()->default_value(0),"read the forests and choose this many directions based on heading toward a hope max (bleu+modelscore) translation.")
+      ("oracle_directions,O",po::value(&n_oracle)->default_value(0),"read the forests and choose this many directions based on heading toward a hope max (bleu+modelscore) translation.")
       ("oracle_start_random",po::bool_switch(&start_random),"sample random subsets of dev set for ALL oracle directions, not just those after a sequential run through it")
-      ("oracle_batch,b",po::value()->default_value(10),"to produce each oracle direction, sum the 'gradient' over this many sentences")
-      ("max_similarity,m",po::value()->default_value(0),"remove directions that are too similar (Tanimoto coeff. less than (1-this)). 0 means don't filter, 1 means only 1 direction allowed?")
-      ("fear_to_hope,f","for each of the oracle_directions, also include a direction from fear to hope (as well as origin to hope)")
+      ("oracle_batch,b",po::value(&oracle_batch)->default_value(10),"to produce each oracle direction, sum the 'gradient' over this many sentences")
+      ("max_similarity,m",po::value(&max_similarity)->default_value(0),"remove directions that are too similar (Tanimoto coeff. less than (1-this)). 0 means don't filter, 1 means only 1 direction allowed?")
+      ("fear_to_hope,f",po::bool_switch(&fear_to_hope),"for each of the oracle_directions, also include a direction from fear to hope (as well as origin to hope)")
       ("help,h", "Help");
     po::options_description dcmdline_options;
     dcmdline_options.add(opts);
@@ -139,16 +139,20 @@ struct oracle_directions {
     oracle.UseConf(conf);

     include_primary=!conf.count("no_primary");
+    old_to_hope=!conf.count("no_old_to_hope");
+
     if (conf.count("optimize_feature") > 0)
       optimize_features=conf["optimize_feature"].as >();
-    fear_to_hope=conf.count("fear_to_hope");
-    n_random=conf["random_directions"].as();
-    forest_repository=conf["forest_repository"].as();
+
+    // po::value(&var) takes care of below:
+//    fear_to_hope=conf.count("fear_to_hope");
+//    n_random=conf["random_directions"].as();
+//    forest_repository=conf["forest_repository"].as();
 //    dev_set_size=conf["dev_set_size"].as();
-    n_oracle=conf["oracle_directions"].as();
-    oracle_batch=conf["oracle_batch"].as();
-    max_similarity=conf["max_similarity"].as();
-    weights_file=conf["weights"].as();
+//    n_oracle=conf["oracle_directions"].as();
+//    oracle_batch=conf["oracle_batch"].as();
+//    max_similarity=conf["max_similarity"].as();
+//    weights_file=conf["weights"].as();
     Init();
   }

@@ -158,7 +162,7 @@ struct oracle_directions {
   unsigned n_oracle, oracle_batch;
   string forest_repository;
   unsigned dev_set_size;
-  vector dirs; //best_to_hope_dirs
+  vector oracles;
   vector fids;
   string forest_file(unsigned i) const {
     ostringstream o;
@@ -178,6 +182,7 @@ struct oracle_directions {
     weights.InitSparseVector(&origin);
     fids.clear();
     AddFeatureIds(features);
+    oracles.resize(dev_set_size);
   }

   Weights weights;
@@ -189,26 +194,42 @@ struct oracle_directions {
   }

-  Dir const& operator[](unsigned i) {
-    Dir &dir=dirs[i];
-    if (dir.empty()) {
+  //TODO: is it worthwhile to get a complete document bleu first?  would take a list of 1best translations one per line from the decoders, rather than loading all the forests (expensive)
+  Oracle const& ComputeOracle(unsigned i) {
+    Oracle &o=oracles[i];
+    if (o.is_null()) {
       ReadFile rf(forest_file(i));
-      FeatureVector fear,hope,best;
-      //TODO: get hope/oracle from vlad.  random for now.
-      LineOptimizer::RandomUnitVector(fids,&dir,&rng);
+      Hypergraph hg;
+      {
+        Timer t("Loading forest from JSON "+forest_file(i));
+        HypergraphIO::ReadFromJSON(rf.stream(), &hg);
+      }
+      o=oracle.ComputeOracles(MakeMetadata(hg,i),hg,origin,&cerr);
     }
-    return dir;
+    return o;
  }

+  // if start_random is true, immediately sample w/ replacement from src sentences; otherwise, consume them sequentially until exhausted, then random.  oracle vectors are summed
   void AddOracleDirections() {
     MT19937::IntRNG rsg=rng.inclusive(0,dev_set_size-1);
     unsigned b=0;
     for(unsigned i=0;i
      =dev_set_size)?rsg():b];
      d/=(double)oracle_batch;
+      Dir o2hope;
+      Dir fear2hope;
+      for (unsigned j=0;j=dev_set_size) ? rsg() : b);
+
+        o2hope+=o.ModelHopeGradient();
+        if (fear_to_hope)
+          fear2hope+=o.FearHopeGradient();
+      }
+      double N=(double)oracle_batch;
+      o2hope/=N;
+      directions.push_back(o2hope);
+      if (fear_to_hope) {
+        fear2hope/=N;
+        directions.push_back(fear2hope);
+      }
     }
   }
 };
--
cgit v1.2.3
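
Note on the vest change above: the new AddOracleDirections() builds each oracle search direction by summing, over an oracle_batch of sentences, a per-sentence feature delta and then dividing by the batch size, pushing one averaged direction for model-to-hope and, with --fear_to_hope, another for fear-to-hope. What follows is a minimal, self-contained sketch of that arithmetic only, not part of the commit; the Dir/OracleTriple types and the helper names below are hypothetical stand-ins for cdec's SparseVector<double>/Oracle machinery, and ModelHopeGradient()/FearHopeGradient() are assumed here to mean hope-minus-model and hope-minus-fear feature differences.

// Illustrative sketch only -- not part of the patch. Mimics the direction
// arithmetic in AddOracleDirections(): each emitted direction is the average,
// over a batch, of (hope - model) feature vectors, and optionally (hope - fear).
#include <cstddef>
#include <iostream>
#include <map>
#include <vector>

typedef std::map<int, double> Dir;  // sparse feature id -> weight delta (stand-in)

struct OracleTriple {               // hypothetical stand-in for the patch's Oracle
  Dir model, hope, fear;            // feature vectors of the three translations
};

static Dir diff(const Dir& a, const Dir& b) {   // sparse a - b
  Dir d(a);
  for (Dir::const_iterator i = b.begin(); i != b.end(); ++i)
    d[i->first] -= i->second;
  return d;
}

static void add_to(Dir& acc, const Dir& d) {    // acc += d
  for (Dir::const_iterator i = d.begin(); i != d.end(); ++i)
    acc[i->first] += i->second;
}

static void scale(Dir& d, double s) {           // d *= s
  for (Dir::iterator i = d.begin(); i != d.end(); ++i)
    i->second *= s;
}

// One averaged model-to-hope direction per batch; optionally a fear-to-hope one.
void AddOracleDirectionsSketch(const std::vector<OracleTriple>& batch,
                               bool fear_to_hope,
                               std::vector<Dir>* directions) {
  Dir o2hope, fear2hope;
  for (std::size_t j = 0; j < batch.size(); ++j) {
    add_to(o2hope, diff(batch[j].hope, batch[j].model));     // ~ModelHopeGradient()
    if (fear_to_hope)
      add_to(fear2hope, diff(batch[j].hope, batch[j].fear));  // ~FearHopeGradient()
  }
  double N = static_cast<double>(batch.size());
  scale(o2hope, 1.0 / N);
  directions->push_back(o2hope);
  if (fear_to_hope) {
    scale(fear2hope, 1.0 / N);
    directions->push_back(fear2hope);
  }
}

int main() {
  OracleTriple t;
  t.model[0] = 1.0; t.hope[0] = 2.0; t.fear[0] = 0.5;  // toy feature 0
  std::vector<OracleTriple> batch(1, t);
  std::vector<Dir> dirs;
  AddOracleDirectionsSketch(batch, true, &dirs);
  std::cout << dirs[0][0] << ' ' << dirs[1][0] << '\n'; // prints 1 1.5
  return 0;
}

Dividing by oracle_batch (as the patch does with d/=(double)oracle_batch and the new N) presumably keeps each direction at the scale of a single sentence's feature delta regardless of batch size, so the line optimizer's steps stay comparable.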