From 867bca3e5fa0cdd63bf032e5859fb5092d9a4ca1 Mon Sep 17 00:00:00 2001
From: Patrick Simianer
Date: Tue, 13 Mar 2012 09:15:46 +0100
Subject: polish

---
 dtrain/test/logreg_cd/log_reg.cc | 39 ---------------------------------------
 1 file changed, 39 deletions(-)
 delete mode 100644 dtrain/test/logreg_cd/log_reg.cc

(limited to 'dtrain/test/logreg_cd/log_reg.cc')

diff --git a/dtrain/test/logreg_cd/log_reg.cc b/dtrain/test/logreg_cd/log_reg.cc
deleted file mode 100644
index ec2331fe..00000000
--- a/dtrain/test/logreg_cd/log_reg.cc
+++ /dev/null
@@ -1,39 +0,0 @@
-#include "log_reg.h"
-
-#include <vector>
-#include <cmath>
-
-#include "sparse_vector.h"
-
-using namespace std;
-
-double LogisticRegression::ObjectiveAndGradient(const SparseVector<double>& x,
-                                                const vector<TrainingInstance>& training_instances,
-                                                SparseVector<double>* g) const {
-  double cll = 0;
-  for (int i = 0; i < training_instances.size(); ++i) {
-    const double dotprod = training_instances[i].x_feature_map.dot(x); // TODO no bias, if bias, add x[0]
-    double lp_false = dotprod;
-    double lp_true = -dotprod;
-    if (0 < lp_true) {
-      lp_true += log1p(exp(-lp_true));
-      lp_false = log1p(exp(lp_false));
-    } else {
-      lp_true = log1p(exp(lp_true));
-      lp_false += log1p(exp(-lp_false));
-    }
-    lp_true *= -1;
-    lp_false *= -1;
-    if (training_instances[i].y) { // true label
-      cll -= lp_true;
-      (*g) -= training_instances[i].x_feature_map * exp(lp_false);
-      // (*g)[0] -= exp(lp_false); // bias
-    } else { // false label
-      cll -= lp_false;
-      (*g) += training_instances[i].x_feature_map * exp(lp_true);
-      // g += corpus[i].second * exp(lp_true);
-    }
-  }
-  return cll;
-}
-
--
cgit v1.2.3
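
A note on the deleted objective: for a label y in {0,1} the code accumulates the negative conditional log-likelihood cll = -sum_i log P(y_i|x_i), with log P(y=1|x) = -log(1 + exp(-w.x)) and log P(y=0|x) = -log(1 + exp(w.x)), and it rewrites log(1 + exp(z)) as z + log1p(exp(-z)) whenever z > 0 so that exp() never overflows. The sketch below is not part of the patch; the helper name log1p_exp and the sample dot product are illustrative and not taken from the cdec/dtrain sources. It only demonstrates the stable evaluation and the per-instance gradient factor used above.

#include <cmath>
#include <cstdio>

// Stable log(1 + exp(z)): for z > 0, rewrite as z + log(1 + exp(-z)) so the
// exponential never overflows. This mirrors the if/else branch in the deleted code.
static double log1p_exp(double z) {
  return z > 0 ? z + std::log1p(std::exp(-z)) : std::log1p(std::exp(z));
}

int main() {
  const double dotprod = 2.5;  // hypothetical w . x for one training instance
  // Log-probabilities of the two labels under the logistic model.
  const double lp_true  = -log1p_exp(-dotprod);  // log P(y=1|x)
  const double lp_false = -log1p_exp(dotprod);   // log P(y=0|x)
  std::printf("log P(y=1|x) = %g  log P(y=0|x) = %g\n", lp_true, lp_false);
  // exp(lp_false) = P(y=0|x) = 1 - sigmoid(w . x); for a true-label instance the
  // deleted code subtracts x_feature_map * exp(lp_false) from the gradient, which
  // is the standard logistic-regression gradient of the negative log-likelihood.
  return 0;
}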