author    Patrick Simianer <p@simianer.de>  2011-09-24 23:46:49 +0200
committer Patrick Simianer <p@simianer.de>  2011-09-24 23:46:49 +0200
commit    5e1ab3481551607f1c2a10027049044cd41f78ab
tree      b89cc3587f2e5d34f14f4b977f8a0909f62fe8e1  /dtrain/test/logreg/log_reg.cc
parent    5584f300821e20369c25343222cd0e8faa393523
cleaning up
Diffstat (limited to 'dtrain/test/logreg/log_reg.cc')
-rw-r--r--  dtrain/test/logreg/log_reg.cc | 39
1 file changed, 39 insertions(+), 0 deletions(-)
diff --git a/dtrain/test/logreg/log_reg.cc b/dtrain/test/logreg/log_reg.cc
new file mode 100644
index 00000000..ec2331fe
--- /dev/null
+++ b/dtrain/test/logreg/log_reg.cc
@@ -0,0 +1,39 @@
+#include "log_reg.h"
+
+#include <vector>
+#include <cmath>
+
+#include "sparse_vector.h"
+
+using namespace std;
+
+double LogisticRegression::ObjectiveAndGradient(const SparseVector<double>& x,
+                                                const vector<TrainingInstance>& training_instances,
+                                                SparseVector<double>* g) const {
+  double cll = 0;
+  for (int i = 0; i < training_instances.size(); ++i) {
+    const double dotprod = training_instances[i].x_feature_map.dot(x); // TODO no bias, if bias, add x[0]
+    double lp_false = dotprod;
+    double lp_true = -dotprod;
+    if (0 < lp_true) {
+      lp_true += log1p(exp(-lp_true));
+      lp_false = log1p(exp(lp_false));
+    } else {
+      lp_true = log1p(exp(lp_true));
+      lp_false += log1p(exp(-lp_false));
+    }
+    lp_true *= -1;
+    lp_false *= -1;
+    if (training_instances[i].y) {  // true label
+      cll -= lp_true;
+      (*g) -= training_instances[i].x_feature_map * exp(lp_false);
+      // (*g)[0] -= exp(lp_false); // bias
+    } else {  // false label
+      cll -= lp_false;
+      (*g) += training_instances[i].x_feature_map * exp(lp_true);
+      // g += corpus[i].second * exp(lp_true);
+    }
+  }
+  return cll;
+}
+
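The new file computes the negative conditional log-likelihood of a binary logistic regression model together with its gradient, using the log1p(exp(·)) rewrite so that log σ(±w·x) stays finite even when |w·x| is large. Below is a minimal, self-contained sketch of the same computation with dense vectors; DenseInstance, the dense ObjectiveAndGradient signature, and the example data are hypothetical stand-ins for cdec's SparseVector and TrainingInstance types, not part of the patch.

```cpp
// Minimal dense-vector sketch of the objective/gradient computation.
// DenseInstance and the dense weight vector are hypothetical stand-ins
// for cdec's TrainingInstance and SparseVector<double>; only the math
// mirrors the patch.
#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

struct DenseInstance {
  std::vector<double> x;  // feature values
  bool y;                 // binary label
};

// Returns the negative conditional log-likelihood sum_i -log p(y_i | x_i)
// and accumulates its gradient w.r.t. the weights into *g (zero-initialized).
double ObjectiveAndGradient(const std::vector<double>& w,
                            const std::vector<DenseInstance>& data,
                            std::vector<double>* g) {
  double cll = 0.0;
  for (const DenseInstance& inst : data) {
    double dot = 0.0;
    for (std::size_t j = 0; j < w.size(); ++j) dot += w[j] * inst.x[j];
    // Numerically stable log-probabilities, as in the patch:
    //   lp_true  = log sigma(dot)  = -log(1 + exp(-dot))
    //   lp_false = log sigma(-dot) = -log(1 + exp(dot))
    // branching on the sign of dot so exp() never overflows.
    const double lp_true =
        dot > 0 ? -std::log1p(std::exp(-dot)) : dot - std::log1p(std::exp(dot));
    const double lp_false =
        dot > 0 ? -dot - std::log1p(std::exp(-dot)) : -std::log1p(std::exp(dot));
    if (inst.y) {  // true label: loss contribution is -lp_true
      cll -= lp_true;
      // d/dw [-log p(true)] = -(1 - p(true)) * x = -p(false) * x
      for (std::size_t j = 0; j < w.size(); ++j)
        (*g)[j] -= std::exp(lp_false) * inst.x[j];
    } else {       // false label: loss contribution is -lp_false
      cll -= lp_false;
      for (std::size_t j = 0; j < w.size(); ++j)
        (*g)[j] += std::exp(lp_true) * inst.x[j];
    }
  }
  return cll;
}

int main() {
  const std::vector<DenseInstance> data = {{{1.0, 0.5}, true},
                                           {{-1.0, 0.2}, false}};
  const std::vector<double> w = {0.1, -0.3};
  std::vector<double> g(w.size(), 0.0);
  std::cout << "objective: " << ObjectiveAndGradient(w, data, &g) << "\n";
  std::cout << "gradient:  " << g[0] << " " << g[1] << "\n";
}
```

The gradient factors fall out of lp_true and lp_false directly: since the loss for a true-labeled instance is -log σ(w·x), its derivative is -σ(-w·x)·x = -exp(lp_false)·x, which is why the patch never has to materialize the probabilities separately from the log-likelihood terms.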