author     Wu, Ke <wuke@cs.umd.edu>    2014-10-07 17:22:11 -0400
committer  Wu, Ke <wuke@cs.umd.edu>    2014-10-07 17:22:11 -0400
commit     00968d1ba03c0603440fe5a765b87869b99a0a93 (patch)
tree       f264c07d9851b47b117839fc9345d7e1d5e880a2 /utils/synutils/maxent-3.0/maxent.cpp
parent     f762dbbf10a8204d0d0b82e9acb29feacd3b3bb4 (diff)
Apply clang-format
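The page does not record how the reformatting was produced; judging from the output (80-column wrapping, two-space indents, attached braces, left-bound & and *), it is consistent with clang-format's Google style. A minimal sketch of the kind of invocation that could produce such a change (an assumption for illustration; the actual command and any .clang-format file are not part of this commit page):

    # hypothetical invocation, assuming a .clang-format with "BasedOnStyle: Google" at the repo root
    clang-format -i utils/synutils/maxent-3.0/maxent.cpp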
Diffstat (limited to 'utils/synutils/maxent-3.0/maxent.cpp')
-rw-r--r--    utils/synutils/maxent-3.0/maxent.cpp    291
1 file changed, 148 insertions, 143 deletions
diff --git a/utils/synutils/maxent-3.0/maxent.cpp b/utils/synutils/maxent-3.0/maxent.cpp
index feb0efdc..8d00ac1d 100644
--- a/utils/synutils/maxent-3.0/maxent.cpp
+++ b/utils/synutils/maxent-3.0/maxent.cpp
@@ -9,14 +9,13 @@ using namespace std;
-double
-ME_Model::FunctionGradient(const vector<double> & x, vector<double> & grad)
-{
+double ME_Model::FunctionGradient(const vector<double>& x,
+                                  vector<double>& grad) {
   assert((int)_fb.Size() == x.size());
   for (size_t i = 0; i < x.size(); i++) {
     _vl[i] = x[i];
   }
-
+
   double score = update_model_expectation();
   if (_l2reg == 0) {
@@ -33,9 +32,7 @@ ME_Model::FunctionGradient(const vector<double> & x, vector<double> & grad)
   return -score;
 }
-int
-ME_Model::perform_GIS(int C)
-{
+int ME_Model::perform_GIS(int C) {
   cerr << "C = " << C << endl;
   C = 1;
   cerr << "performing AGIS" << endl;
@@ -43,11 +40,13 @@ ME_Model::perform_GIS(int C)
   double pre_logl = -999999;
   for (int iter = 0; iter < 200; iter++) {
-    double logl = update_model_expectation();
-    fprintf(stderr, "iter = %2d C = %d f = %10.7f train_err = %7.5f", iter, C, logl, _train_error);
+    double logl = update_model_expectation();
+    fprintf(stderr, "iter = %2d C = %d f = %10.7f train_err = %7.5f", iter,
+            C, logl, _train_error);
     if (_heldout.size() > 0) {
       double hlogl = heldout_likelihood();
-      fprintf(stderr, " heldout_logl(err) = %f (%6.4f)", hlogl, _heldout_error);
+      fprintf(stderr, " heldout_logl(err) = %f (%6.4f)", hlogl,
+              _heldout_error);
     }
     cerr << endl;
@@ -71,13 +70,13 @@ ME_Model::perform_GIS(int C)
   return 0;
 }
-int
-ME_Model::perform_QUASI_NEWTON()
-{
+int ME_Model::perform_QUASI_NEWTON() {
   const int dim = _fb.Size();
   vector<double> x0(dim);
-  for (int i = 0; i < dim; i++) { x0[i] = _vl[i]; }
+  for (int i = 0; i < dim; i++) {
+    x0[i] = _vl[i];
+  }
   vector<double> x;
   if (_l1reg > 0) {
@@ -88,34 +87,39 @@ ME_Model::perform_QUASI_NEWTON()
     x = perform_LBFGS(x0);
   }
-  for (int i = 0; i < dim; i++) { _vl[i] = x[i]; }
+  for (int i = 0; i < dim; i++) {
+    _vl[i] = x[i];
+  }
   return 0;
 }
-int
-ME_Model::conditional_probability(const Sample & s,
-                                  std::vector<double> & membp) const
-{
-  //int num_classes = membp.size();
+int ME_Model::conditional_probability(const Sample& s,
+                                      std::vector<double>& membp) const {
+  // int num_classes = membp.size();
   double sum = 0;
   int max_label = -1;
   // double maxp = 0;
   vector<double> powv(_num_classes, 0.0);
-  for (vector<int>::const_iterator j = s.positive_features.begin(); j != s.positive_features.end(); j++){
-    for (vector<int>::const_iterator k = _feature2mef[*j].begin(); k != _feature2mef[*j].end(); k++) {
+  for (vector<int>::const_iterator j = s.positive_features.begin();
+       j != s.positive_features.end(); j++) {
+    for (vector<int>::const_iterator k = _feature2mef[*j].begin();
+         k != _feature2mef[*j].end(); k++) {
      powv[_fb.Feature(*k).label()] += _vl[*k];
    }
  }
-  for (vector<pair<int, double> >::const_iterator j = s.rvfeatures.begin(); j != s.rvfeatures.end(); j++) {
-    for (vector<int>::const_iterator k = _feature2mef[j->first].begin(); k != _feature2mef[j->first].end(); k++) {
+  for (vector<pair<int, double> >::const_iterator j = s.rvfeatures.begin();
+       j != s.rvfeatures.end(); j++) {
+    for (vector<int>::const_iterator k = _feature2mef[j->first].begin();
+         k != _feature2mef[j->first].end(); k++) {
      powv[_fb.Feature(*k).label()] += _vl[*k] * j->second;
    }
  }
-  std::vector<double>::const_iterator pmax = max_element(powv.begin(), powv.end());
-  double offset = max(0.0, *pmax - 700); // to avoid overflow
+  std::vector<double>::const_iterator pmax =
+      max_element(powv.begin(), powv.end());
+  double offset = max(0.0, *pmax - 700);  // to avoid overflow
  for (int label = 0; label < _num_classes; label++) {
    double pow = powv[label] - offset;
    double prod = exp(pow);
@@ -134,33 +138,38 @@ ME_Model::conditional_probability(const Sample & s,
   return max_label;
 }
-int
-ME_Model::make_feature_bag(const int cutoff)
-{
+int ME_Model::make_feature_bag(const int cutoff) {
   int max_num_features = 0;
-  // count the occurrences of features
+// count the occurrences of features
 #ifdef USE_HASH_MAP
   typedef __gnu_cxx::hash_map<unsigned int, int> map_type;
-#else
+#else
   typedef std::map<unsigned int, int> map_type;
 #endif
   map_type count;
   if (cutoff > 0) {
-    for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end(); i++) {
-      for (std::vector<int>::const_iterator j = i->positive_features.begin(); j != i->positive_features.end(); j++) {
+    for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end();
+         i++) {
+      for (std::vector<int>::const_iterator j = i->positive_features.begin();
+           j != i->positive_features.end(); j++) {
        count[ME_Feature(i->label, *j).body()]++;
      }
-      for (std::vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin(); j != i->rvfeatures.end(); j++) {
+      for (std::vector<pair<int, double> >::const_iterator j =
+               i->rvfeatures.begin();
+           j != i->rvfeatures.end(); j++) {
        count[ME_Feature(i->label, j->first).body()]++;
      }
    }
  }
-  int n = 0;
-  for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end(); i++, n++) {
-    max_num_features = max(max_num_features, (int)(i->positive_features.size()));
-    for (std::vector<int>::const_iterator j = i->positive_features.begin(); j != i->positive_features.end(); j++) {
+  int n = 0;
+  for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end();
+       i++, n++) {
+    max_num_features =
+        max(max_num_features, (int)(i->positive_features.size()));
+    for (std::vector<int>::const_iterator j = i->positive_features.begin();
+         j != i->positive_features.end(); j++) {
      const ME_Feature feature(i->label, *j);
      // if (cutoff > 0 && count[feature.body()] < cutoff) continue;
      if (cutoff > 0 && count[feature.body()] <= cutoff) continue;
@@ -168,7 +177,9 @@ ME_Model::make_feature_bag(const int cutoff)
      // cout << i->label << "\t" << *j << "\t" << id << endl;
      // feature2sample[id].push_back(n);
    }
-    for (std::vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin(); j != i->rvfeatures.end(); j++) {
+    for (std::vector<pair<int, double> >::const_iterator j =
+             i->rvfeatures.begin();
+         j != i->rvfeatures.end(); j++) {
      const ME_Feature feature(i->label, j->first);
      // if (cutoff > 0 && count[feature.body()] < cutoff) continue;
      if (cutoff > 0 && count[feature.body()] <= cutoff) continue;
@@ -176,71 +187,72 @@ ME_Model::make_feature_bag(const int cutoff)
    }
  }
  count.clear();
-
+
  // cerr << "num_classes = " << _num_classes << endl;
  // cerr << "max_num_features = " << max_num_features << endl;
  init_feature2mef();
-
+
  return max_num_features;
 }
-double
-ME_Model::heldout_likelihood()
-{
+double ME_Model::heldout_likelihood() {
  double logl = 0;
  int ncorrect = 0;
-  for (std::vector<Sample>::const_iterator i = _heldout.begin(); i != _heldout.end(); i++) {
+  for (std::vector<Sample>::const_iterator i = _heldout.begin();
+       i != _heldout.end(); i++) {
    vector<double> membp(_num_classes);
    int l = classify(*i, membp);
    logl += log(membp[i->label]);
    if (l == i->label) ncorrect++;
  }
  _heldout_error = 1 - (double)ncorrect / _heldout.size();
-
+
  return logl /= _heldout.size();
 }
-double
-ME_Model::update_model_expectation()
-{
+double ME_Model::update_model_expectation() {
  double logl = 0;
  int ncorrect = 0;
  _vme.resize(_fb.Size());
  for (int i = 0; i < _fb.Size(); i++) _vme[i] = 0;
-
+
  int n = 0;
-  for (vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end(); i++, n++) {
+  for (vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end();
+       i++, n++) {
    vector<double> membp(_num_classes);
    int max_label = conditional_probability(*i, membp);
-
+
    logl += log(membp[i->label]);
    // cout << membp[*i] << " " << logl << " ";
    if (max_label == i->label) ncorrect++;
    // model_expectation
-    for (vector<int>::const_iterator j = i->positive_features.begin(); j != i->positive_features.end(); j++){
-      for (vector<int>::const_iterator k = _feature2mef[*j].begin(); k != _feature2mef[*j].end(); k++) {
-        _vme[*k] += membp[_fb.Feature(*k).label()];
+    for (vector<int>::const_iterator j = i->positive_features.begin();
+         j != i->positive_features.end(); j++) {
+      for (vector<int>::const_iterator k = _feature2mef[*j].begin();
+           k != _feature2mef[*j].end(); k++) {
+        _vme[*k] += membp[_fb.Feature(*k).label()];
      }
    }
-    for (vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin(); j != i->rvfeatures.end(); j++) {
-      for (vector<int>::const_iterator k = _feature2mef[j->first].begin(); k != _feature2mef[j->first].end(); k++) {
-        _vme[*k] += membp[_fb.Feature(*k).label()] * j->second;
+    for (vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin();
+         j != i->rvfeatures.end(); j++) {
+      for (vector<int>::const_iterator k = _feature2mef[j->first].begin();
+           k != _feature2mef[j->first].end(); k++) {
+        _vme[*k] += membp[_fb.Feature(*k).label()] * j->second;
      }
    }
-
  }
  for (int i = 0; i < _fb.Size(); i++) {
    _vme[i] /= _vs.size();
  }
-
+
  _train_error = 1 - (double)ncorrect / _vs.size();
  logl /= _vs.size();
-
+
  if (_l2reg > 0) {
    const double c = _l2reg;
    for (int i = 0; i < _fb.Size(); i++) {
@@ -248,17 +260,17 @@ ME_Model::update_model_expectation()
    }
  }
-  //logl /= _vs.size();
-
-  // fprintf(stderr, "iter =%3d logl = %10.7f train_acc = %7.5f\n", iter, logl, (double)ncorrect/train.size());
-  // fprintf(stderr, "logl = %10.7f train_acc = %7.5f\n", logl, (double)ncorrect/_train.size());
+  // logl /= _vs.size();
+
+  // fprintf(stderr, "iter =%3d logl = %10.7f train_acc = %7.5f\n", iter,
+  // logl, (double)ncorrect/train.size());
+  // fprintf(stderr, "logl = %10.7f train_acc = %7.5f\n", logl,
+  // (double)ncorrect/_train.size());
  return logl;
 }
-int
-ME_Model::train(const vector<ME_Sample> & vms)
-{
+int ME_Model::train(const vector<ME_Sample>& vms) {
  _vs.clear();
  for (vector<ME_Sample>::const_iterator i = vms.begin(); i != vms.end(); i++) {
    add_training_sample(*i);
@@ -267,39 +279,41 @@ ME_Model::train(const vector<ME_Sample> & vms)
  return train();
 }
-void
-ME_Model::add_training_sample(const ME_Sample & mes)
-{
+void ME_Model::add_training_sample(const ME_Sample& mes) {
  Sample s;
  s.label = _label_bag.Put(mes.label);
  if (s.label > ME_Feature::MAX_LABEL_TYPES) {
    cerr << "error: too many types of labels." << endl;
    exit(1);
  }
-  for (vector<string>::const_iterator j = mes.features.begin(); j != mes.features.end(); j++) {
+  for (vector<string>::const_iterator j = mes.features.begin();
+       j != mes.features.end(); j++) {
    s.positive_features.push_back(_featurename_bag.Put(*j));
  }
-  for (vector<pair<string, double> >::const_iterator j = mes.rvfeatures.begin(); j != mes.rvfeatures.end(); j++) {
-    s.rvfeatures.push_back(pair<int, double>(_featurename_bag.Put(j->first), j->second));
+  for (vector<pair<string, double> >::const_iterator j = mes.rvfeatures.begin();
+       j != mes.rvfeatures.end(); j++) {
+    s.rvfeatures.push_back(
+        pair<int, double>(_featurename_bag.Put(j->first), j->second));
  }
  if (_ref_modelp != NULL) {
-    ME_Sample tmp = mes;;
+    ME_Sample tmp = mes;
+    ;
    s.ref_pd = _ref_modelp->classify(tmp);
  }
  // cout << s.label << "\t";
-  // for (vector<int>::const_iterator j = s.positive_features.begin(); j != s.positive_features.end(); j++){
+  // for (vector<int>::const_iterator j = s.positive_features.begin(); j !=
+  // s.positive_features.end(); j++){
  //   cout << *j << " ";
  // }
  // cout << endl;
-
+
  _vs.push_back(s);
 }
-int
-ME_Model::train()
-{
+int ME_Model::train() {
  if (_l1reg > 0 && _l2reg > 0) {
-    cerr << "error: L1 and L2 regularizers cannot be used simultaneously." << endl;
+    cerr << "error: L1 and L2 regularizers cannot be used simultaneously."
+         << endl;
    return 0;
  }
  if (_vs.size() == 0) {
@@ -307,20 +321,22 @@ ME_Model::train()
    return 0;
  }
  if (_nheldout >= (int)_vs.size()) {
-    cerr << "error: too much heldout data. no training data is available." << endl;
+    cerr << "error: too much heldout data. no training data is available."
+         << endl;
    return 0;
  }
  // if (_nheldout > 0) random_shuffle(_vs.begin(), _vs.end());
  int max_label = 0;
-  for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end(); i++) {
+  for (std::vector<Sample>::const_iterator i = _vs.begin(); i != _vs.end();
+       i++) {
    max_label = max(max_label, i->label);
  }
  _num_classes = max_label + 1;
  if (_num_classes != _label_bag.Size()) {
    cerr << "warning: _num_class != _label_bag.Size()" << endl;
  }
-
+
  if (_ref_modelp != NULL) {
    cerr << "setting reference distribution...";
    for (int i = 0; i < _ref_modelp->num_classes(); i++) {
@@ -332,7 +348,7 @@ ME_Model::train()
    }
    cerr << "done" << endl;
  }
-
+
  for (int i = 0; i < _nheldout; i++) {
    _heldout.push_back(_vs.back());
    _vs.pop_back();
@@ -362,25 +378,28 @@ ME_Model::train()
    _vee[i] = 0;
  }
  for (int n = 0; n < (int)_vs.size(); n++) {
-    const Sample * i = &_vs[n];
-    for (vector<int>::const_iterator j = i->positive_features.begin(); j != i->positive_features.end(); j++){
-      for (vector<int>::const_iterator k = _feature2mef[*j].begin(); k != _feature2mef[*j].end(); k++) {
-        if (_fb.Feature(*k).label() == i->label) _vee[*k] += 1.0;
+    const Sample* i = &_vs[n];
+    for (vector<int>::const_iterator j = i->positive_features.begin();
+         j != i->positive_features.end(); j++) {
+      for (vector<int>::const_iterator k = _feature2mef[*j].begin();
+           k != _feature2mef[*j].end(); k++) {
+        if (_fb.Feature(*k).label() == i->label) _vee[*k] += 1.0;
      }
    }
-    for (vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin(); j != i->rvfeatures.end(); j++) {
-      for (vector<int>::const_iterator k = _feature2mef[j->first].begin(); k != _feature2mef[j->first].end(); k++) {
-        if (_fb.Feature(*k).label() == i->label) _vee[*k] += j->second;
+    for (vector<pair<int, double> >::const_iterator j = i->rvfeatures.begin();
+         j != i->rvfeatures.end(); j++) {
+      for (vector<int>::const_iterator k = _feature2mef[j->first].begin();
+           k != _feature2mef[j->first].end(); k++) {
+        if (_fb.Feature(*k).label() == i->label) _vee[*k] += j->second;
      }
    }
-
  }
  for (int i = 0; i < _fb.Size(); i++) {
    _vee[i] /= _vs.size();
  }
  cerr << "done" << endl;
-
+
  _vl.resize(_fb.Size());
  for (int i = 0; i < _fb.Size(); i++) _vl[i] = 0.0;
@@ -399,13 +418,12 @@ ME_Model::train()
  return 0;
 }
-void
-ME_Model::get_features(list< pair< pair<string, string>, double> > & fl)
-{
+void ME_Model::get_features(list<pair<pair<string, string>, double> >& fl) {
  fl.clear();
  // for (int i = 0; i < _fb.Size(); i++) {
  //   ME_Feature f = _fb.Feature(i);
-  //   fl.push_back( make_pair(make_pair(_label_bag.Str(f.label()), _featurename_bag.Str(f.feature())), _vl[i]));
+  //   fl.push_back( make_pair(make_pair(_label_bag.Str(f.label()),
+  //   _featurename_bag.Str(f.feature())), _vl[i]));
  // }
  for (MiniStringBag::map_type::const_iterator i = _featurename_bag.begin();
       i != _featurename_bag.end(); i++) {
@@ -414,14 +432,12 @@ ME_Model::get_features(list< pair< pair<string, string>, double> > & fl)
      string history = i->first;
      int id = _fb.Id(ME_Feature(j, i->second));
      if (id < 0) continue;
-      fl.push_back( make_pair(make_pair(label, history), _vl[id]) );
+      fl.push_back(make_pair(make_pair(label, history), _vl[id]));
    }
  }
 }
-void
-ME_Model::clear()
-{
+void ME_Model::clear() {
  _vl.clear();
  _label_bag.Clear();
  _featurename_bag.Clear();
@@ -433,10 +449,8 @@ ME_Model::clear()
  _heldout.clear();
 }
-bool
-ME_Model::load_from_file(const string & filename)
-{
-  FILE * fp = fopen(filename.c_str(), "r");
+bool ME_Model::load_from_file(const string& filename) {
+  FILE* fp = fopen(filename.c_str(), "r");
  if (!fp) {
    cerr << "error: cannot open " << filename << "!" << endl;
    return false;
@@ -447,22 +461,22 @@ ME_Model::load_from_file(const string & filename)
  _featurename_bag.Clear();
  _fb.Clear();
  char buf[1024];
-  while(fgets(buf, 1024, fp)) {
+  while (fgets(buf, 1024, fp)) {
    string line(buf);
    string::size_type t1 = line.find_first_of('\t');
    string::size_type t2 = line.find_last_of('\t');
    string classname = line.substr(0, t1);
-    string featurename = line.substr(t1 + 1, t2 - (t1 + 1) );
+    string featurename = line.substr(t1 + 1, t2 - (t1 + 1));
    float lambda;
-    string w = line.substr(t2+1);
+    string w = line.substr(t2 + 1);
    sscanf(w.c_str(), "%f", &lambda);
-
+
    int label = _label_bag.Put(classname);
    int feature = _featurename_bag.Put(featurename);
    _fb.Put(ME_Feature(label, feature));
    _vl.push_back(lambda);
  }
-
+
  _num_classes = _label_bag.Size();
  init_feature2mef();
@@ -472,9 +486,7 @@ ME_Model::load_from_file(const string & filename)
  return true;
 }
-void
-ME_Model::init_feature2mef()
-{
+void ME_Model::init_feature2mef() {
  _feature2mef.clear();
  for (int i = 0; i < _featurename_bag.Size(); i++) {
    vector<int> vi;
@@ -486,9 +498,7 @@ ME_Model::init_feature2mef()
  }
 }
-bool
-ME_Model::load_from_array(const ME_Model_Data data[])
-{
+bool ME_Model::load_from_array(const ME_Model_Data data[]) {
  _vl.clear();
  for (int i = 0;; i++) {
    if (string(data[i].label) == "///") break;
@@ -500,14 +510,12 @@ ME_Model::load_from_array(const ME_Model_Data data[])
  _num_classes = _label_bag.Size();
  init_feature2mef();
-
+
  return true;
 }
-bool
-ME_Model::save_to_file(const string & filename, const double th) const
-{
-  FILE * fp = fopen(filename.c_str(), "w");
+bool ME_Model::save_to_file(const string& filename, const double th) const {
+  FILE* fp = fopen(filename.c_str(), "w");
  if (!fp) {
    cerr << "error: cannot open " << filename << "!" << endl;
    return false;
@@ -516,7 +524,8 @@ ME_Model::save_to_file(const string & filename, const double th) const
  // for (int i = 0; i < _fb.Size(); i++) {
  //   if (_vl[i] == 0) continue; // ignore zero-weight features
  //   ME_Feature f = _fb.Feature(i);
-  //   fprintf(fp, "%s\t%s\t%f\n", _label_bag.Str(f.label()).c_str(), _featurename_bag.Str(f.feature()).c_str(), _vl[i]);
+  //   fprintf(fp, "%s\t%s\t%f\n", _label_bag.Str(f.label()).c_str(),
+  //   _featurename_bag.Str(f.feature()).c_str(), _vl[i]);
  // }
  for (MiniStringBag::map_type::const_iterator i = _featurename_bag.begin();
       i != _featurename_bag.end(); i++) {
@@ -525,8 +534,8 @@ ME_Model::save_to_file(const string & filename, const double th) const
      string history = i->first;
      int id = _fb.Id(ME_Feature(j, i->second));
      if (id < 0) continue;
-      if (_vl[id] == 0) continue; // ignore zero-weight features
-      if (fabs(_vl[id]) < th) continue; // cut off low-weight features
+      if (_vl[id] == 0) continue;        // ignore zero-weight features
+      if (fabs(_vl[id]) < th) continue;  // cut off low-weight features
      fprintf(fp, "%s\t%s\t%f\n", label.c_str(), history.c_str(), _vl[id]);
    }
  }
@@ -536,9 +545,7 @@ ME_Model::save_to_file(const string & filename, const double th) const
  return true;
 }
-void
-ME_Model::set_ref_dist(Sample & s) const
-{
+void ME_Model::set_ref_dist(Sample& s) const {
  vector<double> v0 = s.ref_pd;
  vector<double> v(_num_classes);
  for (unsigned int i = 0; i < v.size(); i++) {
@@ -548,14 +555,12 @@ ME_Model::set_ref_dist(Sample & s) const
    if (id_ref != -1) {
      v[i] = v0[id_ref];
    }
-    if (v[i] == 0) v[i] = 0.001; // to avoid -inf logl
+    if (v[i] == 0) v[i] = 0.001;  // to avoid -inf logl
  }
  s.ref_pd = v;
 }
-
-int
-ME_Model::classify(const Sample & nbs, vector<double> & membp) const
-{
+
+int ME_Model::classify(const Sample& nbs, vector<double>& membp) const {
  // vector<double> membp(_num_classes);
  assert(_num_classes == (int)membp.size());
  conditional_probability(nbs, membp);
@@ -563,22 +568,24 @@ ME_Model::classify(const Sample & nbs, vector<double> & membp) const
  double max = 0.0;
  for (int i = 0; i < (int)membp.size(); i++) {
    // cout << membp[i] << " ";
-    if (membp[i] > max) { max_label = i; max = membp[i]; }
+    if (membp[i] > max) {
+      max_label = i;
+      max = membp[i];
+    }
  }
  // cout << endl;
  return max_label;
 }
-vector<double>
-ME_Model::classify(ME_Sample & mes) const
-{
+vector<double> ME_Model::classify(ME_Sample& mes) const {
  Sample s;
-  for (vector<string>::const_iterator j = mes.features.begin(); j != mes.features.end(); j++) {
+  for (vector<string>::const_iterator j = mes.features.begin();
+       j != mes.features.end(); j++) {
    int id = _featurename_bag.Id(*j);
-    if (id >= 0)
-      s.positive_features.push_back(id);
+    if (id >= 0) s.positive_features.push_back(id);
  }
-  for (vector<pair<string, double> >::const_iterator j = mes.rvfeatures.begin(); j != mes.rvfeatures.end(); j++) {
+  for (vector<pair<string, double> >::const_iterator j = mes.rvfeatures.begin();
+       j != mes.rvfeatures.end(); j++) {
    int id = _featurename_bag.Id(j->first);
    if (id >= 0) {
      s.rvfeatures.push_back(pair<int, double>(id, j->second));
@@ -595,7 +602,6 @@ ME_Model::classify(ME_Sample & mes) const
  return vp;
 }
-
/*
 * $Log: maxent.cpp,v $
 * Revision 1.1.1.1 2007/05/15 08:30:35 kyoshida
 *
@@ -695,4 +701,3 @@ ME_Model::classify(ME_Sample & mes) const
 * remove some comments
 *
 */
-
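One detail the reformatting leaves unchanged: conditional_probability subtracts offset = max(0.0, *pmax - 700) from every score before calling exp, so the largest exponent stays near 700 and exp never overflows a double. A self-contained C++ sketch of that pattern, for illustration only (softmax_with_offset is a hypothetical name, not a function from this file):

    #include <algorithm>
    #include <cmath>
    #include <vector>

    // Softmax over raw scores with the same overflow guard used in
    // ME_Model::conditional_probability: shift the scores down so the
    // largest exponent passed to exp() is at most about 700.
    std::vector<double> softmax_with_offset(const std::vector<double>& powv) {
      double pmax = *std::max_element(powv.begin(), powv.end());
      double offset = std::max(0.0, pmax - 700);  // avoid exp() overflow
      std::vector<double> prob(powv.size());
      double sum = 0;
      for (size_t i = 0; i < powv.size(); i++) {
        prob[i] = std::exp(powv[i] - offset);
        sum += prob[i];
      }
      for (size_t i = 0; i < powv.size(); i++) prob[i] /= sum;
      return prob;
    }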