#include <iostream>
#include <fstream>
#include <vector>
#include <cassert>
#include <cmath>
#include "config.h"
#ifdef HAVE_MPI
#include <boost/mpi.hpp>
#endif
#include <boost/program_options.hpp>
#include <boost/program_options/variables_map.hpp>
#include "sentence_metadata.h"
#include "verbose.h"
#include "hg.h"
#include "prob.h"
#include "inside_outside.h"
#include "ff_register.h"
#include "decoder.h"
#include "filelib.h"
#include "weights.h"
using namespace std;
namespace po = boost::program_options;
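// Parse options from the command line and, optionally, from a --config file;
// --training_data and --decoder_config are required.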
bool InitCommandLine(int argc, char** argv, po::variables_map* conf) {
  po::options_description opts("Configuration options");
  opts.add_options()
        ("weights,w", po::value<string>(), "Input feature weights file")
        ("training_data,t", po::value<string>(), "Training data corpus")
        ("decoder_config,c", po::value<string>(), "Decoder configuration file");
  po::options_description clo("Command line options");
  clo.add_options()
        ("config", po::value<string>(), "Configuration file")
        ("help,h", "Print this help message and exit");
  po::options_description dconfig_options, dcmdline_options;
  dconfig_options.add(opts);
  dcmdline_options.add(opts).add(clo);

  po::store(parse_command_line(argc, argv, dcmdline_options), *conf);
  if (conf->count("config")) {
    ifstream config((*conf)["config"].as<string>().c_str());
    po::store(po::parse_config_file(config, dconfig_options), *conf);
  }
  po::notify(*conf);

  if (conf->count("help") || !conf->count("training_data") || !conf->count("decoder_config")) {
    cerr << dcmdline_options << endl;
    return false;
  }
  return true;
}
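// Shard the training corpus round-robin across processes: process `rank`
// out of `size` keeps lines rank, rank+size, rank+2*size, ...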
void ReadInstances(const string& fname, int rank, int size, vector<string>* c) {
  assert(fname != "-");
  ReadFile rf(fname);
  istream& in = *rf.stream();
  string line;
  int lc = 0;
  while (in) {
    getline(in, line);
    if (!in) break;
    if (lc % size == rank) c->push_back(line);
    ++lc;
  }
}
static const double kMINUS_EPSILON = -1e-6;
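// Accumulates the (negative) conditional log-likelihood over the decoded shard:
// for each sentence, log Z of the full translation forest minus log Z of the
// reference-constrained forest, plus the reference word count so the objective
// can be normalized per target word.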
struct ConditionalLikelihoodObserver : public DecoderObserver {

  ConditionalLikelihoodObserver() : trg_words(), acc_obj(), cur_obj() {}

  virtual void NotifyDecodingStart(const SentenceMetadata&) {
    cur_obj = 0;
    state = 1;
  }

  // compute model expectations, denominator of objective
  virtual void NotifyTranslationForest(const SentenceMetadata&, Hypergraph* hg) {
    assert(state == 1);
    state = 2;
    SparseVector<prob_t> cur_model_exp;
    const prob_t z = InsideOutside<prob_t,
                                   EdgeProb,
                                   SparseVector<prob_t>,
                                   EdgeFeaturesAndProbWeightFunction>(*hg, &cur_model_exp);
    cur_obj = log(z);
  }

  // compute "empirical" expectations, numerator of objective
  virtual void NotifyAlignmentForest(const SentenceMetadata& smeta, Hypergraph* hg) {
    assert(state == 2);
    state = 3;
    SparseVector<prob_t> ref_exp;
    const prob_t ref_z = InsideOutside<prob_t,
                                       EdgeProb,
                                       SparseVector<prob_t>,
                                       EdgeFeaturesAndProbWeightFunction>(*hg, &ref_exp);
    double log_ref_z;
#if 0
    if (crf_uniform_empirical) {
      log_ref_z = ref_exp.dot(feature_weights);
    } else {
      log_ref_z = log(ref_z);
    }
#else
    log_ref_z = log(ref_z);
#endif
    // rounding errors mean that < 0 is too strict
    if ((cur_obj - log_ref_z) < kMINUS_EPSILON) {
      cerr << "DIFF. ERR! log_model_z < log_ref_z: " << cur_obj << " " << log_ref_z << endl;
      exit(1);
    }
    assert(!std::isnan(log_ref_z));
    acc_obj += (cur_obj - log_ref_z);
    trg_words += smeta.GetReference().size();
  }

  unsigned trg_words;
  double acc_obj;
  double cur_obj;
  int state;
};
#ifdef HAVE_MPI
namespace mpi = boost::mpi;
#endif
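// Each process decodes its shard of the corpus; per-shard objectives and target
// word counts are then summed (via MPI reduce when available) on rank 0, which
// reports the likelihood, per-word cross-entropy, and perplexity.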
int main(int argc, char** argv) {
#ifdef HAVE_MPI
  mpi::environment env(argc, argv);
  mpi::communicator world;
  const int size = world.size();
  const int rank = world.rank();
#else
  const int size = 1;
  const int rank = 0;
#endif
  if (size > 1) SetSilent(true);  // turn off verbose decoder output
  register_feature_functions();

  po::variables_map conf;
  if (!InitCommandLine(argc, argv, &conf))
    return 1;

  // load cdec.ini and set up decoder
  ReadFile ini_rf(conf["decoder_config"].as<string>());
  Decoder decoder(ini_rf.stream());
  if (decoder.GetConf()["input"].as<string>() != "-") {
    cerr << "cdec.ini must not set an input file\n";
    abort();
  }

  // load weights
  vector<weight_t>& weights = decoder.CurrentWeightVector();
  if (conf.count("weights"))
    Weights::InitFromFile(conf["weights"].as<string>(), &weights);

  vector<string> corpus;
  ReadInstances(conf["training_data"].as<string>(), rank, size, &corpus);
  assert(corpus.size() > 0);

  if (rank == 0)
    cerr << "Each processor is decoding ~" << corpus.size() << " training examples...\n";

  ConditionalLikelihoodObserver observer;
  for (unsigned i = 0; i < corpus.size(); ++i)
    decoder.Decode(corpus[i], &observer);

  double objective = 0;
  unsigned total_words = 0;
#ifdef HAVE_MPI
  reduce(world, observer.acc_obj, objective, std::plus<double>(), 0);
  reduce(world, observer.trg_words, total_words, std::plus<unsigned>(), 0);
#else
  objective = observer.acc_obj;
  total_words = observer.trg_words;
#endif

  if (rank == 0) {
    cout << "CONDITIONAL LOG_e LIKELIHOOD: " << objective << endl;
    cout << "CONDITIONAL LOG_2 LIKELIHOOD: " << (objective / log(2)) << endl;
    cout << "         CONDITIONAL ENTROPY: " << (objective / log(2) / total_words) << endl;
    cout << "                  PERPLEXITY: " << pow(2, (objective / log(2) / total_words)) << endl;
  }
  return 0;
}