| author | Chris Dyer <cdyer@cs.cmu.edu> | 2011-07-12 23:32:11 -0400 |
|---|---|---|
| committer | Chris Dyer <cdyer@cs.cmu.edu> | 2011-07-12 23:32:11 -0400 |
| commit | b8f7fc10e14eb07b17f1ef46f8ecd3c13f128814 (patch) | |
| tree | b03020da64b1801db56ce2565a9144c3af559582 /pro-train | |
| parent | 5e3c68b62dd72255db95c5822835a3931770f285 (diff) | |
minor optimization
Diffstat (limited to 'pro-train')
-rw-r--r-- | pro-train/mr_pro_reduce.cc | 10 |
1 file changed, 6 insertions, 4 deletions
```diff
diff --git a/pro-train/mr_pro_reduce.cc b/pro-train/mr_pro_reduce.cc
index e1a7db8a..5382e1a5 100644
--- a/pro-train/mr_pro_reduce.cc
+++ b/pro-train/mr_pro_reduce.cc
@@ -149,18 +149,20 @@ int main(int argc, char** argv) {
 #endif
     cll += reg;
     cerr << cll << " (REG=" << reg << ")\tPPL=" << ppl << "\t";
-    bool failed = false;
     try {
-      opt.Optimize(cll, vg, &x);
+      vector<double> old_x = x;
+      do {
+        opt.Optimize(cll, vg, &x);
+        converged = opt.HasConverged();
+      } while (!converged && x == old_x);
     } catch (...) {
       cerr << "Exception caught, assuming convergence is close enough...\n";
-      failed = true;
+      converged = true;
     }
     if (fabs(x[0]) > MAX_BIAS) {
       cerr << "Biased model learned. Are your training instances wrong?\n";
       cerr << " BIAS: " << x[0] << endl;
     }
-    converged = failed || opt.HasConverged();
   }
   Weights w;
   if (conf.count("weights")) {
```
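In short, the change replaces the single `opt.Optimize(...)` call with a retry loop: the optimizer is re-invoked until it either reports convergence or actually moves the weight vector `x`, and an exception is still treated as convergence. The likely payoff is that a stalled step (one that leaves `x` untouched without converging) no longer burns an outer iteration on what is presumably an expensive recomputation of the same objective and gradient. Below is a minimal, self-contained C++ sketch of that pattern; `MockOptimizer` and its stalling behavior are hypothetical stand-ins, with only the `Optimize(obj, grad, &x)` / `HasConverged()` interface taken from the diff.

```cpp
// Sketch of the retry pattern introduced in this commit. MockOptimizer is a
// hypothetical stand-in for cdec's batch optimizer; everything except the
// Optimize/HasConverged interface is invented for illustration.
#include <cmath>
#include <iostream>
#include <vector>

using namespace std;

// Minimizes f(x) = sum x_i^2 by gradient steps, but deliberately "stalls"
// (returns without moving x) on its first call to mimic an optimizer step
// that makes no progress yet does not report convergence.
struct MockOptimizer {
  MockOptimizer() : calls_(0), converged_(false) {}
  void Optimize(double /*obj*/, const vector<double>& grad, vector<double>* x) {
    ++calls_;
    if (calls_ == 1) return;               // stalled step: x left unchanged
    double gnorm = 0;
    for (size_t i = 0; i < x->size(); ++i) {
      (*x)[i] -= 0.5 * grad[i];            // simple gradient step
      gnorm += grad[i] * grad[i];
    }
    converged_ = sqrt(gnorm) < 1e-6;       // converged when gradient is tiny
  }
  bool HasConverged() const { return converged_; }
  int calls_;
  bool converged_;
};

int main() {
  vector<double> x(3, 1.0);
  MockOptimizer opt;
  bool converged = false;
  while (!converged) {
    // Recompute objective and gradient for the current x (here: f = sum x_i^2).
    // In mr_pro_reduce this is the expensive pass over the training data.
    double obj = 0;
    vector<double> grad(x.size());
    for (size_t i = 0; i < x.size(); ++i) { obj += x[i] * x[i]; grad[i] = 2 * x[i]; }

    // The pattern from the commit: keep re-invoking the optimizer until it
    // either reports convergence or actually changes x, so a stalled step
    // does not trigger another full objective/gradient recomputation.
    vector<double> old_x = x;
    do {
      opt.Optimize(obj, grad, &x);
      converged = opt.HasConverged();
    } while (!converged && x == old_x);

    cerr << "obj=" << obj << "\tx[0]=" << x[0] << endl;
  }
  return 0;
}
```

The guard `while (!converged && x == old_x)` is what distinguishes the new code from the old `converged = failed || opt.HasConverged();` line: a call that neither converges nor updates `x` is simply retried on the spot instead of counting as a full outer iteration.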