author     redpony <redpony@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-09-21 20:10:27 +0000
committer  redpony <redpony@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-09-21 20:10:27 +0000
commit     0277e5da3aa444ced74af8085f3349f91d17e56f (patch)
tree       91e9db117a6c5a3cfe02998de943dd0bf28c58e9
parent     46d6e1fd3dbe0bc116a46fdab91b40fe2ecd6803 (diff)
fix for lattice cost bug
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@653 ec762483-ff6d-05da-a07a-a48fb63a330f
-rw-r--r--  decoder/bottom_up_parser.cc |  2
-rw-r--r--  decoder/decoder.cc          |  6
-rw-r--r--  decoder/rule_lexer.l        |  7
-rw-r--r--  decoder/scfg_translator.cc  |  6
-rw-r--r--  utils/weights.cc            | 18
-rw-r--r--  utils/weights.h             |  2
6 files changed, 25 insertions, 16 deletions
diff --git a/decoder/bottom_up_parser.cc b/decoder/bottom_up_parser.cc
index 2d945222..9504419c 100644
--- a/decoder/bottom_up_parser.cc
+++ b/decoder/bottom_up_parser.cc
@@ -188,7 +188,7 @@ void PassiveChart::ApplyRule(const int i,
new_edge->i_ = i;
new_edge->j_ = j;
new_edge->feature_values_ = r->GetFeatureValues();
- if (lattice_cost)
+ if (lattice_cost && lc_fid_)
new_edge->feature_values_.set_value(lc_fid_, lattice_cost);
Cat2NodeMap& c2n = nodemap_(i,j);
const bool is_goal = (r->GetLHS() == kGOAL);
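Note: the one-line change above adds lc_fid_ to the guard, so a nonzero lattice cost is only written into the edge's feature vector when a lattice-cost feature id has actually been assigned (reading fid 0 as "no such feature" is an assumption, not stated in the diff). A minimal sketch of the guarded pattern, with illustrative names rather than cdec's real types:

#include <map>

// Stand-in for a sparse feature vector (illustrative, not cdec's class).
struct SparseFeatures {
  std::map<int, double> values_;
  void set_value(int fid, double v) { values_[fid] = v; }
};

// Record the lattice cost only when the cost is nonzero AND a real feature id
// exists; writing to fid 0 would silently pollute an unused/reserved slot.
void ApplyLatticeCost(SparseFeatures* feats, int lc_fid, double lattice_cost) {
  if (lattice_cost && lc_fid)
    feats->set_value(lc_fid, lattice_cost);
}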
diff --git a/decoder/decoder.cc b/decoder/decoder.cc
index e0967e40..1a233fc5 100644
--- a/decoder/decoder.cc
+++ b/decoder/decoder.cc
@@ -580,10 +580,10 @@ DecoderImpl::DecoderImpl(po::variables_map& conf, int argc, char** argv, istream
cerr << "prelm rescoring with "<<prelm_ffs.size()<<" 0-state feature functions. +LM pass will use "<<late_ffs.size()<<" features (not counting rule features)."<<endl;
late_models = new ModelSet(feature_weights, late_ffs);
- show_models(conf,*late_models,"late ");
+ if (!SILENT) show_models(conf,*late_models,"late ");
prelm_models = new ModelSet(prelm_feature_weights, prelm_ffs);
- if (has_prelm_models)
- show_models(conf,*prelm_models,"prelm ");
+ if (has_prelm_models) {
+ if (!SILENT) show_models(conf,*prelm_models,"prelm "); }
int palg = 1;
if (LowercaseString(str("intersection_strategy",conf)) == "full") {
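Note: the repeated if (!SILENT) guards in this commit gate diagnostic output on the global verbosity flag from verbose.h. A rough self-contained sketch of the pattern; the declarations here are assumed, not copied from cdec's verbose.h:

#include <iostream>

// Assumed stand-in for verbose.h: one process-wide flag that callers check
// before emitting progress or diagnostic output to stderr.
static bool SILENT = false;

void ShowModels(const char* label) {
  if (!SILENT) std::cerr << "feature models for pass: " << label << std::endl;
}

int main() {
  ShowModels("late ");   // printed
  SILENT = true;
  ShowModels("prelm ");  // suppressed
  return 0;
}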
diff --git a/decoder/rule_lexer.l b/decoder/rule_lexer.l
index 2de94587..2e5d3bf5 100644
--- a/decoder/rule_lexer.l
+++ b/decoder/rule_lexer.l
@@ -11,6 +11,7 @@
#include "tdict.h"
#include "fdict.h"
#include "trule.h"
+#include "verbose.h"
int lex_line = 0;
std::istream* scfglex_stream = NULL;
@@ -222,8 +223,10 @@ NT [^\t \[\],]+
// std::cerr << rp->AsString() << std::endl;
num_rules++;
lex_line++;
- if (num_rules % 50000 == 0) { std::cerr << '.' << std::flush; fl = true; }
- if (num_rules % 2000000 == 0) { std::cerr << " [" << num_rules << "]\n"; fl = false; }
+ if (!SILENT) {
+ if (num_rules % 50000 == 0) { std::cerr << '.' << std::flush; fl = true; }
+ if (num_rules % 2000000 == 0) { std::cerr << " [" << num_rules << "]\n"; fl = false; }
+ }
ctf_level = 0;
BEGIN(INITIAL);
}
diff --git a/decoder/scfg_translator.cc b/decoder/scfg_translator.cc
index 4e6cc226..60123e6f 100644
--- a/decoder/scfg_translator.cc
+++ b/decoder/scfg_translator.cc
@@ -34,7 +34,7 @@ struct SCFGTranslatorImpl {
if(conf.count("grammar")){
vector<string> gfiles = conf["grammar"].as<vector<string> >();
for (int i = 0; i < gfiles.size(); ++i) {
- cerr << "Reading SCFG grammar from " << gfiles[i] << endl;
+ if (!SILENT) cerr << "Reading SCFG grammar from " << gfiles[i] << endl;
TextGrammar* g = new TextGrammar(gfiles[i]);
g->SetMaxSpan(max_span_limit);
g->SetGrammarName(gfiles[i]);
@@ -46,7 +46,7 @@ struct SCFGTranslatorImpl {
GlueGrammar* g = new GlueGrammar(conf["scfg_extra_glue_grammar"].as<string>());
g->SetGrammarName("ExtraGlueGrammar");
grammars.push_back(GrammarPtr(g));
- cerr << "Adding glue grammar from file " << conf["scfg_extra_glue_grammar"].as<string>() << endl;
+ if (!SILENT) cerr << "Adding glue grammar from file " << conf["scfg_extra_glue_grammar"].as<string>() << endl;
}
ctf_iterations_=0;
if (use_ctf_){
@@ -74,7 +74,7 @@ struct SCFGTranslatorImpl {
GlueGrammar* g = new GlueGrammar(goal, default_nt, ctf_iterations_);
g->SetGrammarName("GlueGrammar");
grammars.push_back(GrammarPtr(g));
- cerr << "Adding glue grammar for default nonterminal " << default_nt <<
+ if (!SILENT) cerr << "Adding glue grammar for default nonterminal " << default_nt <<
" and goal nonterminal " << goal << endl;
}
}
diff --git a/utils/weights.cc b/utils/weights.cc
index 84647585..ea8bd816 100644
--- a/utils/weights.cc
+++ b/utils/weights.cc
@@ -4,11 +4,12 @@
#include "fdict.h"
#include "filelib.h"
+#include "verbose.h"
using namespace std;
void Weights::InitFromFile(const std::string& filename, vector<string>* feature_list) {
- cerr << "Reading weights from " << filename << endl;
+ if (!SILENT) cerr << "Reading weights from " << filename << endl;
ReadFile in_file(filename);
istream& in = *in_file.stream();
assert(in);
@@ -38,17 +39,22 @@ void Weights::InitFromFile(const std::string& filename, vector<string>* feature_
wv_[fid] = val;
if (feature_list) { feature_list->push_back(FD::Convert(fid)); }
++weight_count;
- if (weight_count % 50000 == 0) { cerr << '.' << flush; fl = true; }
- if (weight_count % 2000000 == 0) { cerr << " [" << weight_count << "]\n"; fl = false; }
+ if (!SILENT) {
+ if (weight_count % 50000 == 0) { cerr << '.' << flush; fl = true; }
+ if (weight_count % 2000000 == 0) { cerr << " [" << weight_count << "]\n"; fl = false; }
+ }
+ }
+ if (!SILENT) {
+ if (fl) { cerr << endl; }
+ cerr << "Loaded " << weight_count << " feature weights\n";
}
- if (fl) { cerr << endl; }
- cerr << "Loaded " << weight_count << " feature weights\n";
}
-void Weights::WriteToFile(const std::string& fname, bool hide_zero_value_features) const {
+void Weights::WriteToFile(const std::string& fname, bool hide_zero_value_features, const string* extra) const {
WriteFile out(fname);
ostream& o = *out.stream();
assert(o);
+ if (extra) { o << "# " << *extra << endl; }
o.precision(17);
const int num_feats = FD::NumFeats();
for (int i = 1; i < num_feats; ++i) {
diff --git a/utils/weights.h b/utils/weights.h
index f19aa3ce..1849f959 100644
--- a/utils/weights.h
+++ b/utils/weights.h
@@ -10,7 +10,7 @@ class Weights {
public:
Weights() {}
void InitFromFile(const std::string& fname, std::vector<std::string>* feature_list = NULL);
- void WriteToFile(const std::string& fname, bool hide_zero_value_features = true) const;
+ void WriteToFile(const std::string& fname, bool hide_zero_value_features = true, const std::string* extra = NULL) const;
void InitVector(std::vector<double>* w) const;
void InitSparseVector(SparseVector<double>* w) const;
void InitFromVector(const std::vector<double>& w);
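Note: the signature change gives WriteToFile an optional third argument; when non-NULL, the string is written as a leading "# ..." comment line (see the weights.cc hunk above), and existing calls compile unchanged thanks to the default. A hypothetical call site, with the filenames and comment text made up for illustration:

#include <string>
#include "weights.h"   // the header modified above

void SaveWeights(const Weights& w) {
  const std::string note = "tuning iteration metadata";  // illustrative text
  w.WriteToFile("weights.final", true, &note);   // first line of file: "# tuning iteration metadata"
  w.WriteToFile("weights.plain");                // old-style call, no comment line written
}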