Diffstat (limited to 'training/mira')
-rw-r--r--  training/mira/kbest_cut_mira.cc   72
-rw-r--r--  training/mira/kbest_mira.cc       18
-rwxr-xr-x  training/mira/mira.py            100
3 files changed, 93 insertions, 97 deletions
diff --git a/training/mira/kbest_cut_mira.cc b/training/mira/kbest_cut_mira.cc
index 59fa860a..990609d7 100644
--- a/training/mira/kbest_cut_mira.cc
+++ b/training/mira/kbest_cut_mira.cc
@@ -30,7 +30,6 @@
 #include "sparse_vector.h"
 
 using namespace std;
-using boost::shared_ptr;
 namespace po = boost::program_options;
 
 bool invert_score;
@@ -50,13 +49,6 @@ bool sent_approx;
 bool checkloss;
 bool stream;
 
-void SanityCheck(const vector<double>& w) {
-  for (int i = 0; i < w.size(); ++i) {
-    assert(!isnan(w[i]));
-    assert(!isinf(w[i]));
-  }
-}
-
 struct FComp {
   const vector<double>& w_;
   FComp(const vector<double>& w) : w_(w) {}
@@ -149,7 +141,7 @@ struct HypothesisInfo {
   double alpha;
   double oracle_loss;
   SparseVector<double> oracle_feat_diff;
-  shared_ptr<HypothesisInfo> oracleN;
+  boost::shared_ptr<HypothesisInfo> oracleN;
 };
 
 bool ApproxEqual(double a, double b) {
@@ -157,7 +149,7 @@ bool ApproxEqual(double a, double b) {
   return (fabs(a-b)/fabs(b)) < EPSILON;
 }
 
-typedef shared_ptr<HypothesisInfo> HI;
+typedef boost::shared_ptr<HypothesisInfo> HI;
 bool HypothesisCompareB(const HI& h1, const HI& h2 )
 {
   return h1->mt_metric > h2->mt_metric;
@@ -185,11 +177,11 @@ bool HypothesisCompareG(const HI& h1, const HI& h2 )
 };
 
-void CuttingPlane(vector<shared_ptr<HypothesisInfo> >* cur_c, bool* again, vector<shared_ptr<HypothesisInfo> >& all_hyp, vector<weight_t> dense_weights)
+void CuttingPlane(vector<boost::shared_ptr<HypothesisInfo> >* cur_c, bool* again, vector<boost::shared_ptr<HypothesisInfo> >& all_hyp, vector<weight_t> dense_weights)
 {
   bool DEBUG_CUT = false;
-  shared_ptr<HypothesisInfo> max_fear, max_fear_in_set;
-  vector<shared_ptr<HypothesisInfo> >& cur_constraint = *cur_c;
+  boost::shared_ptr<HypothesisInfo> max_fear, max_fear_in_set;
+  vector<boost::shared_ptr<HypothesisInfo> >& cur_constraint = *cur_c;
 
   if(no_reweight)
     {
@@ -235,9 +227,9 @@ void CuttingPlane(vector<shared_ptr<HypothesisInfo> >* cur_c, bool* again, vecto
 }
 
-double ComputeDelta(vector<shared_ptr<HypothesisInfo> >* cur_p, double max_step_size,vector<weight_t> dense_weights )
+double ComputeDelta(vector<boost::shared_ptr<HypothesisInfo> >* cur_p, double max_step_size,vector<weight_t> dense_weights )
 {
-  vector<shared_ptr<HypothesisInfo> >& cur_pair = *cur_p;
+  vector<boost::shared_ptr<HypothesisInfo> >& cur_pair = *cur_p;
   double loss = cur_pair[0]->oracle_loss - cur_pair[1]->oracle_loss;
   double margin = -(cur_pair[0]->oracleN->features.dot(dense_weights)- cur_pair[0]->features.dot(dense_weights)) + (cur_pair[1]->oracleN->features.dot(dense_weights) - cur_pair[1]->features.dot(dense_weights));
@@ -261,12 +253,12 @@ double ComputeDelta(vector<shared_ptr<HypothesisInfo> >* cur_p, double max_step_
 }
 
-vector<shared_ptr<HypothesisInfo> > SelectPair(vector<shared_ptr<HypothesisInfo> >* cur_c)
+vector<boost::shared_ptr<HypothesisInfo> > SelectPair(vector<boost::shared_ptr<HypothesisInfo> >* cur_c)
 {
   bool DEBUG_SELECT= false;
-  vector<shared_ptr<HypothesisInfo> >& cur_constraint = *cur_c;
+  vector<boost::shared_ptr<HypothesisInfo> >& cur_constraint = *cur_c;
 
-  vector<shared_ptr<HypothesisInfo> > pair;
+  vector<boost::shared_ptr<HypothesisInfo> > pair;
 
   if (no_select || optimizer == 2){ //skip heuristic search and return oracle and fear for pa-mira
@@ -278,7 +270,7 @@ vector<shared_ptr<HypothesisInfo> > SelectPair(vector<shared_ptr<HypothesisInfo>
   for(int u=0;u != cur_constraint.size();u++)
     {
-      shared_ptr<HypothesisInfo> max_fear;
+      boost::shared_ptr<HypothesisInfo> max_fear;
 
       if(DEBUG_SELECT) cerr<< "cur alpha " << u  << " " << cur_constraint[u]->alpha;
       for(int i=0; i < cur_constraint.size();i++) //select maximal violator
@@ -323,8 +315,8 @@ vector<shared_ptr<HypothesisInfo> > SelectPair(vector<shared_ptr<HypothesisInfo>
 }
 
 struct GoodBadOracle {
-  vector<shared_ptr<HypothesisInfo> > good;
-  vector<shared_ptr<HypothesisInfo> > bad;
+  vector<boost::shared_ptr<HypothesisInfo> > good;
+  vector<boost::shared_ptr<HypothesisInfo> > bad;
 };
 
 struct BasicObserver: public DecoderObserver {
@@ -367,8 +359,8 @@ struct TrainingObserver : public DecoderObserver {
   const DocScorer& ds;
   vector<ScoreP>& corpus_bleu_sent_stats;
   vector<GoodBadOracle>& oracles;
-  vector<shared_ptr<HypothesisInfo> > cur_best;
-  shared_ptr<HypothesisInfo> cur_oracle;
+  vector<boost::shared_ptr<HypothesisInfo> > cur_best;
+  boost::shared_ptr<HypothesisInfo> cur_oracle;
   const int kbest_size;
   Hypergraph forest;
   int cur_sent;
@@ -386,7 +378,7 @@ struct TrainingObserver : public DecoderObserver {
     return *cur_best[0];
   }
 
-  const vector<shared_ptr<HypothesisInfo> > GetCurrentBest() const {
+  const vector<boost::shared_ptr<HypothesisInfo> > GetCurrentBest() const {
     return cur_best;
   }
@@ -411,8 +403,8 @@ struct TrainingObserver : public DecoderObserver {
   }
 
-  shared_ptr<HypothesisInfo> MakeHypothesisInfo(const SparseVector<double>& feats, const double score, const vector<WordID>& hyp) {
-    shared_ptr<HypothesisInfo> h(new HypothesisInfo);
+  boost::shared_ptr<HypothesisInfo> MakeHypothesisInfo(const SparseVector<double>& feats, const double score, const vector<WordID>& hyp) {
+    boost::shared_ptr<HypothesisInfo> h(new HypothesisInfo);
     h->features = feats;
     h->mt_metric = score;
     h->hyp = hyp;
@@ -424,14 +416,14 @@ struct TrainingObserver : public DecoderObserver {
 	if (stream) sent_id = 0;
     bool PRINT_LIST= false;
-    vector<shared_ptr<HypothesisInfo> >& cur_good = oracles[sent_id].good;
-    vector<shared_ptr<HypothesisInfo> >& cur_bad = oracles[sent_id].bad;
+    vector<boost::shared_ptr<HypothesisInfo> >& cur_good = oracles[sent_id].good;
+    vector<boost::shared_ptr<HypothesisInfo> >& cur_bad = oracles[sent_id].bad;
     //TODO: look at keeping previous iterations hypothesis lists around
     cur_best.clear();
     cur_good.clear();
     cur_bad.clear();
 
-    vector<shared_ptr<HypothesisInfo> > all_hyp;
+    vector<boost::shared_ptr<HypothesisInfo> > all_hyp;
 
     typedef KBest::KBestDerivations<vector<WordID>, ESentenceTraversal,Filter> K;
     K kbest(forest,kbest_size);
@@ -527,7 +519,7 @@ struct TrainingObserver : public DecoderObserver {
     if(PRINT_LIST) { cerr << "GOOD" << endl;  for(int u=0;u!=cur_good.size();u++) cerr << cur_good[u]->mt_metric << " " << cur_good[u]->hope << endl;}
 
     //use hope for fear selection
-    shared_ptr<HypothesisInfo>& oracleN = cur_good[0];
+    boost::shared_ptr<HypothesisInfo>& oracleN = cur_good[0];
 
     if(fear_select == 1){   //compute fear hyps with model - bleu
       if (PRINT_LIST) cerr << "FEAR " << endl;
@@ -663,13 +655,13 @@ int main(int argc, char** argv) {
     invert_score = false;
   }
 
-  shared_ptr<DocScorer> ds;
+  boost::shared_ptr<DocScorer> ds;
   //normal: load references, stream: start stream scorer
   if (stream) {
-	  ds = shared_ptr<DocScorer>(new DocStreamScorer(type, vector<string>(0), ""));
+	  ds = boost::shared_ptr<DocScorer>(new DocStreamScorer(type, vector<string>(0), ""));
 	  cerr << "Scoring doc stream with " << metric_name << endl;
   } else {
-      ds = shared_ptr<DocScorer>(new DocScorer(type, conf["reference"].as<vector<string> >(), ""));
+      ds = boost::shared_ptr<DocScorer>(new DocScorer(type, conf["reference"].as<vector<string> >(), ""));
       cerr << "Loaded " << ds->size() << " references for scoring with " << metric_name << endl;
   }
   vector<ScoreP> corpus_bleu_sent_stats;
@@ -774,9 +766,9 @@ int main(int argc, char** argv) {
       const HypothesisInfo& cur_good = *oracles[cur_sent].good[0];
       const HypothesisInfo& cur_bad = *oracles[cur_sent].bad[0];
 
-      vector<shared_ptr<HypothesisInfo> >& cur_good_v = oracles[cur_sent].good;
-      vector<shared_ptr<HypothesisInfo> >& cur_bad_v = oracles[cur_sent].bad;
-      vector<shared_ptr<HypothesisInfo> > cur_best_v = observer.GetCurrentBest();
+      vector<boost::shared_ptr<HypothesisInfo> >& cur_good_v = oracles[cur_sent].good;
+      vector<boost::shared_ptr<HypothesisInfo> >& cur_bad_v = oracles[cur_sent].bad;
+      vector<boost::shared_ptr<HypothesisInfo> > cur_best_v = observer.GetCurrentBest();
 
       tot_loss += cur_hyp.mt_metric;
@@ -824,13 +816,13 @@ int main(int argc, char** argv) {
 	}
       else if(optimizer == 5) //full mira with n-best list of constraints from hope, fear, model best
 	{
-	  vector<shared_ptr<HypothesisInfo> > cur_constraint;
+	  vector<boost::shared_ptr<HypothesisInfo> > cur_constraint;
 	  cur_constraint.insert(cur_constraint.begin(), cur_bad_v.begin(), cur_bad_v.end());
 	  cur_constraint.insert(cur_constraint.begin(), cur_best_v.begin(), cur_best_v.end());
 	  cur_constraint.insert(cur_constraint.begin(), cur_good_v.begin(), cur_good_v.end());
 
 	  bool optimize_again;
-	  vector<shared_ptr<HypothesisInfo> > cur_pair;
+	  vector<boost::shared_ptr<HypothesisInfo> > cur_pair;
 	  //SMO
 	  for(int u=0;u!=cur_constraint.size();u++)
 	    cur_constraint[u]->alpha =0;
@@ -879,7 +871,7 @@ int main(int argc, char** argv) {
       else if(optimizer == 2 || optimizer == 3) //PA and Cutting Plane MIRA update
 	  {
 	    bool DEBUG_SMO= true;
-	    vector<shared_ptr<HypothesisInfo> > cur_constraint;
+	    vector<boost::shared_ptr<HypothesisInfo> > cur_constraint;
 	    cur_constraint.push_back(cur_good_v[0]); //add oracle to constraint set
 	    bool optimize_again = true;
 	    int cut_plane_calls = 0;
@@ -919,7 +911,7 @@ int main(int argc, char** argv) {
 		    while (iter < smo_iter)
 		      {
 			//select pair to optimize from constraint set
-			vector<shared_ptr<HypothesisInfo> > cur_pair = SelectPair(&cur_constraint);
+			vector<boost::shared_ptr<HypothesisInfo> > cur_pair = SelectPair(&cur_constraint);
 
 			if(cur_pair.empty()){
 			  iter=MAX_SMO;
diff --git a/training/mira/kbest_mira.cc b/training/mira/kbest_mira.cc
index d59b4224..2868de0c 100644
--- a/training/mira/kbest_mira.cc
+++ b/training/mira/kbest_mira.cc
@@ -3,10 +3,10 @@
 #include <vector>
 #include <cassert>
 #include <cmath>
-#include <tr1/memory>
 
 #include <boost/program_options.hpp>
 #include <boost/program_options/variables_map.hpp>
+#include <boost/shared_ptr.hpp>
 
 #include "stringlib.h"
 #include "hg_sampler.h"
@@ -30,7 +30,7 @@ using namespace std;
 namespace po = boost::program_options;
 
 bool invert_score;
-std::tr1::shared_ptr<MT19937> rng;
+boost::shared_ptr<MT19937> rng;
 
 void RandomPermutation(int len, vector<int>* p_ids) {
   vector<int>& ids = *p_ids;
@@ -88,8 +88,8 @@ struct HypothesisInfo {
 };
 
 struct GoodBadOracle {
-  std::tr1::shared_ptr<HypothesisInfo> good;
-  std::tr1::shared_ptr<HypothesisInfo> bad;
+  boost::shared_ptr<HypothesisInfo> good;
+  boost::shared_ptr<HypothesisInfo> bad;
 };
 
 struct TrainingObserver : public DecoderObserver {
@@ -97,7 +97,7 @@ struct TrainingObserver : public DecoderObserver {
   const DocumentScorer& ds;
   const EvaluationMetric& metric;
   vector<GoodBadOracle>& oracles;
-  std::tr1::shared_ptr<HypothesisInfo> cur_best;
+  boost::shared_ptr<HypothesisInfo> cur_best;
   const int kbest_size;
   const bool sample_forest;
@@ -109,16 +109,16 @@ struct TrainingObserver : public DecoderObserver {
     UpdateOracles(smeta.GetSentenceID(), *hg);
   }
 
-  std::tr1::shared_ptr<HypothesisInfo> MakeHypothesisInfo(const SparseVector<double>& feats, const double score) {
-    std::tr1::shared_ptr<HypothesisInfo> h(new HypothesisInfo);
+  boost::shared_ptr<HypothesisInfo> MakeHypothesisInfo(const SparseVector<double>& feats, const double score) {
+    boost::shared_ptr<HypothesisInfo> h(new HypothesisInfo);
     h->features = feats;
     h->mt_metric = score;
     return h;
   }
 
   void UpdateOracles(int sent_id, const Hypergraph& forest) {
-    std::tr1::shared_ptr<HypothesisInfo>& cur_good = oracles[sent_id].good;
-    std::tr1::shared_ptr<HypothesisInfo>& cur_bad = oracles[sent_id].bad;
+    boost::shared_ptr<HypothesisInfo>& cur_good = oracles[sent_id].good;
+    boost::shared_ptr<HypothesisInfo>& cur_bad = oracles[sent_id].bad;
     cur_bad.reset();  // TODO get rid of??
 
     if (sample_forest) {
diff --git a/training/mira/mira.py b/training/mira/mira.py
index 29c51e1d..d5a1d9f8 100755
--- a/training/mira/mira.py
+++ b/training/mira/mira.py
@@ -4,8 +4,19 @@ import subprocess, shlex, glob
 import argparse
 import logging
 import random, time
-import cdec.score
 import gzip, itertools
+try:
+  import cdec.score
+except ImportError:
+  sys.stderr.write('Could not import pycdec, see cdec/python/README.md for details\n')
+  sys.exit(1)
+have_mpl = True
+try:
+  import matplotlib
+  matplotlib.use('Agg')
+  import matplotlib.pyplot as plt
+except ImportError:
+  have_mpl = False
 
 #mira run script
 #requires pycdec to be built, since it is used for scoring hypothesis
@@ -16,17 +27,17 @@ import gzip, itertools
 #scoring function using pycdec scoring
 def fast_score(hyps, refs, metric):
   scorer = cdec.score.Scorer(metric)
-  logging.info('loaded {0} references for scoring with {1}\n'.format(
+  logging.info('loaded {0} references for scoring with {1}'.format(
                 len(refs), metric))
   if metric=='BLEU':
     logging.warning('BLEU is ambiguous, assuming IBM_BLEU\n')
     metric = 'IBM_BLEU'
   elif metric=='COMBI':
     logging.warning('COMBI metric is no longer supported, switching to '
-                    'COMB:TER=-0.5;BLEU=0.5\n')
+                    'COMB:TER=-0.5;BLEU=0.5')
     metric = 'COMB:TER=-0.5;BLEU=0.5'
   stats = sum(scorer(r).evaluate(h) for h,r in itertools.izip(hyps,refs))
-  logging.info(stats.detail+'\n')
+  logging.info('Score={} ({})'.format(stats.score, stats.detail))
   return stats.score
 
 #create new parallel input file in output directory in sgml format
@@ -71,6 +82,8 @@ def main():
   #set logging to write all info messages to stderr
   logging.basicConfig(level=logging.INFO)
   script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+  if not have_mpl:
+    logging.warning('Failed to import matplotlib, graphs will not be generated.')
 
   parser= argparse.ArgumentParser(
             formatter_class=argparse.ArgumentDefaultsHelpFormatter)
@@ -181,10 +194,11 @@
   dev_size = enseg(args.devset, newdev, args.grammar_prefix)
   args.devset = newdev
 
-  write_config(args)
+  log_config(args)
   args.weights, hope_best_fear = optimize(args, script_dir, dev_size)
 
-  graph_file = graph(args.output_dir, hope_best_fear, args.metric)
+  graph_file = ''
+  if have_mpl: graph_file = graph(args.output_dir, hope_best_fear, args.metric)
 
   dev_results, dev_bleu = evaluate(args.devset, args.weights, args.config,
                           script_dir, args.output_dir)
@@ -205,17 +219,12 @@
 
   if graph_file:
     logging.info('A graph of the best/hope/fear scores over the iterations '
-                 'has been saved to {}\n'.format(graph_file))
+                 'has been saved to {}'.format(graph_file))
 
   print 'final weights:\n{}\n'.format(args.weights)
 
 #graph of hope/best/fear metric values across all iterations
 def graph(output_dir, hope_best_fear, metric):
-  try:
-    import matplotlib.pyplot as plt
-  except ImportError:
-    logging.error('Error importing matplotlib. Graphing disabled.\n')
-    return ''
   max_y = float(max(hope_best_fear['best']))*1.5
   plt.plot(hope_best_fear['best'], label='best')
   plt.plot(hope_best_fear['hope'], label='hope')
@@ -308,6 +317,7 @@ def optimize(args, script_dir, dev_size):
   decoder = script_dir+'/kbest_cut_mira'
   (source, refs) = split_devset(args.devset, args.output_dir)
   port = random.randint(15000,50000)
+  logging.info('using port {}'.format(port))
   num_features = 0
   last_p_score = 0
   best_score_iter = -1
@@ -316,8 +326,8 @@
   hope_best_fear = {'hope':[],'best':[],'fear':[]}
   #main optimization loop
   while i<args.max_iterations:
-    logging.info('\n\nITERATION {}\n========\n'.format(i))
-    logging.info('using port {}\n'.format(port))
+    logging.info('======= STARTING ITERATION {} ======='.format(i))
+    logging.info('Starting at {}'.format(time.asctime()))
 
     #iteration specific files
     runfile = args.output_dir+'/run.raw.'+str(i)
@@ -327,10 +337,8 @@
     weightdir = args.output_dir+'/weights.pass'+str(i)
     os.mkdir(logdir)
     os.mkdir(weightdir)
-
-    logging.info('RUNNING DECODER AT {}'.format(time.asctime()))
     weightsfile = args.output_dir+'/weights.'+str(i)
-    logging.info('ITER {}\n'.format(i))
+    logging.info('  log directory={}'.format(logdir))
     curr_pass = '0{}'.format(i)
     decoder_cmd = ('{0} -c {1} -w {2} -r{3} -m {4} -s {5} -b {6} -k {7} -o {8}'
                    ' -p {9} -O {10} -D {11} -h {12} -f {13} -C {14}').format(
@@ -350,7 +358,7 @@
                     parallelize, logdir, args.jobs)
 
     cmd = parallel_cmd + ' ' + decoder_cmd
-    logging.info('COMMAND: \n{}\n'.format(cmd))
+    logging.info('OPTIMIZATION COMMAND: {}'.format(cmd))
 
     dlog = open(decoderlog,'w')
     runf = open(runfile,'w')
@@ -365,27 +373,26 @@
       p1.stdout.close()
 
       if exit_code:
-        logging.error('Failed with exit code {}\n'.format(exit_code))
+        logging.error('Failed with exit code {}'.format(exit_code))
         sys.exit(exit_code)
 
       try:
         f = open(runfile)
       except IOError, msg:
-        logging.error('Unable to open {}\n'.format(runfile))
+        logging.error('Unable to open {}'.format(runfile))
         sys.exit()
 
       num_topbest = sum(1 for line in f)
      f.close()
      if num_topbest == dev_size: break
-      logging.warning('Incorrect number of top best. '
-                      'Waiting for distributed filesystem and retrying.')
+      logging.warning('Incorrect number of top best. Sleeping for 10 seconds and retrying...')
       time.sleep(10)
       retries += 1
 
     if dev_size != num_topbest:
       logging.error("Dev set contains "+dev_size+" sentences, but we don't "
                     "have topbest for all of these. Decoder failure? "
-                    " Check "+decoderlog+'\n')
+                    " Check "+decoderlog)
       sys.exit()
     dlog.close()
     runf.close()
@@ -427,7 +434,7 @@
     hope_best_fear['hope'].append(dec_score)
     hope_best_fear['best'].append(dec_score_h)
     hope_best_fear['fear'].append(dec_score_f)
-    logging.info('DECODER SCORE: {0} HOPE: {1} FEAR: {2}\n'.format(
+    logging.info('DECODER SCORE: {0} HOPE: {1} FEAR: {2}'.format(
                   dec_score, dec_score_h, dec_score_f))
     if dec_score > best_score:
       best_score_iter = i
@@ -436,12 +443,13 @@
     new_weights_file = '{}/weights.{}'.format(args.output_dir, i+1)
     last_weights_file = '{}/weights.{}'.format(args.output_dir, i)
     i += 1
-    weight_files = weightdir+'/weights.mira-pass*.*[0-9].gz'
+    weight_files = args.output_dir+'/weights.pass*/weights.mira-pass*[0-9].gz'
     average_weights(new_weights_file, weight_files)
 
-  logging.info('\nBEST ITER: {} :: {}\n\n'.format(
+  logging.info('BEST ITERATION: {} (SCORE={})'.format(
                best_score_iter, best_score))
   weights_final = args.output_dir+'/weights.final'
+  logging.info('WEIGHTS FILE: {}'.format(weights_final))
   shutil.copy(last_weights_file, weights_final)
   average_final_weights(args.output_dir)
@@ -481,15 +489,15 @@ def gzip_file(filename):
 
 #average the weights for a given pass
 def average_weights(new_weights, weight_files):
-  logging.info('AVERAGE {} {}\n'.format(new_weights, weight_files))
+  logging.info('AVERAGE {} {}'.format(new_weights, weight_files))
   feature_weights = {}
   total_mult = 0.0
   for path in glob.glob(weight_files):
     score = gzip.open(path)
     mult = 0
-    logging.info('FILE {}\n'.format(path))
+    logging.info('  FILE {}'.format(path))
     msg, ran, mult = score.readline().strip().split(' ||| ')
-    logging.info('Processing {} {}'.format(ran, mult))
+    logging.info('  Processing {} {}'.format(ran, mult))
     for line in score:
       f,w = line.split(' ',1)
       if f in feature_weights:
@@ -500,34 +508,30 @@ def average_weights(new_weights, weight_files):
     score.close()
 
   #write new weights to outfile
+  logging.info('Writing averaged weights to {}'.format(new_weights))
   out = open(new_weights, 'w')
   for f in iter(feature_weights):
     avg = feature_weights[f]/total_mult
-    logging.info('{} {} {} ||| Printing {} {}\n'.format(f,feature_weights[f],
-                 total_mult, f, avg))
     out.write('{} {}\n'.format(f,avg))
 
-def write_config(args):
-  config = ('\n'
-            'DECODER: '
-            '/usr0/home/eschling/cdec/training/mira/kbest_cut_mira\n'
-            'INI FILE: '+args.config+'\n'
-            'WORKING DIRECTORY: '+args.output_dir+'\n'
-            'DEVSET: '+args.devset+'\n'
-            'EVAL METRIC: '+args.metric+'\n'
-            'MAX ITERATIONS: '+str(args.max_iterations)+'\n'
-            'DECODE NODES: '+str(args.jobs)+'\n'
-            'INITIAL WEIGHTS: '+args.weights+'\n')
+def log_config(args):
+  logging.info('WORKING DIRECTORY={}'.format(args.output_dir))
+  logging.info('INI FILE={}'.format(args.config))
+  logging.info('DEVSET={}'.format(args.devset))
+  logging.info('EVAL METRIC={}'.format(args.metric))
+  logging.info('MAX ITERATIONS={}'.format(args.max_iterations))
+  logging.info('PARALLEL JOBS={}'.format(args.jobs))
+  logging.info('INITIAL WEIGHTS={}'.format(args.weights))
   if args.grammar_prefix:
-    config += 'GRAMMAR PREFIX: '+str(args.grammar_prefix)+'\n'
+    logging.info('GRAMMAR PREFIX={}'.format(args.grammar_prefix))
   if args.test:
-    config += 'TEST SET: '+args.test+'\n'
+    logging.info('TEST SET={}'.format(args.test))
+  else:
+    logging.info('TEST SET=none specified')
   if args.test_config:
-    config += 'TEST CONFIG: '+args.test_config+'\n'
+    logging.info('TEST CONFIG={}'.format(args.test_config))
   if args.email:
-    config += 'EMAIL: '+args.email+'\n'
-
-  logging.info(config)
+    logging.info('EMAIL={}'.format(args.email))
 
 if __name__=='__main__':
   main()
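
The mira.py portion of this change moves matplotlib handling out of graph() into a guarded module-level import: the non-interactive Agg backend is selected before pyplot is loaded, and a have_mpl flag lets the script warn and skip graphing instead of crashing when the library is absent (useful on headless cluster nodes). Below is a minimal standalone sketch of that pattern; the plot_scores helper, its sample data, and the output filename are illustrative, not part of the patch.

import logging

# Optional dependency: record availability instead of failing at import time.
have_mpl = True
try:
    import matplotlib
    matplotlib.use('Agg')  # pick a non-GUI backend; must precede the pyplot import
    import matplotlib.pyplot as plt
except ImportError:
    have_mpl = False

def plot_scores(scores, out_png):
    """Write a line plot of per-iteration scores; degrade to a no-op without matplotlib."""
    if not have_mpl:
        logging.warning('matplotlib unavailable, skipping graph')
        return ''
    plt.plot(scores, label='best')
    plt.legend()
    plt.savefig(out_png)
    return out_png

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    print(plot_scores([0.21, 0.24, 0.26], 'scores.png'))

Checking availability once at import time also lets main() warn early ("graphs will not be generated") rather than discovering the missing dependency only after a full optimization run, which is the design choice the patch makes.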
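The reworded warning in optimize() ("Sleeping for 10 seconds and retrying...") belongs to a retry loop that tolerates distributed-filesystem lag: the decoder may have exited before all of its top-best output is visible, so the script recounts the lines in the run file a few times before declaring failure. A rough sketch of the same idea follows; the wait_for_topbest name and the retry bound are hypothetical, not from the patch.

import logging
import time

def wait_for_topbest(runfile, expected, max_retries=6, delay=10):
    """Poll runfile until it holds `expected` lines, sleeping between attempts."""
    for _ in range(max_retries):
        try:
            with open(runfile) as f:
                num_topbest = sum(1 for _line in f)
        except IOError:
            num_topbest = 0  # file not visible yet on this node
        if num_topbest == expected:
            return True
        logging.warning('Incorrect number of top best (%d/%d). '
                        'Sleeping for %d seconds and retrying...',
                        num_topbest, expected, delay)
        time.sleep(delay)
    return False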
