Diffstat (limited to 'utils/synutils/maxent-3.0/owlqn.cpp')
-rw-r--r--  utils/synutils/maxent-3.0/owlqn.cpp  46
1 file changed, 18 insertions(+), 28 deletions(-)
diff --git a/utils/synutils/maxent-3.0/owlqn.cpp b/utils/synutils/maxent-3.0/owlqn.cpp
index 7b2cea7d..c3a0f0da 100644
--- a/utils/synutils/maxent-3.0/owlqn.cpp
+++ b/utils/synutils/maxent-3.0/owlqn.cpp
@@ -8,29 +8,24 @@
using namespace std;
-const static int M = LBFGS_M;
+const static int M = LBFGS_M;
const static double LINE_SEARCH_ALPHA = 0.1;
-const static double LINE_SEARCH_BETA = 0.5;
+const static double LINE_SEARCH_BETA = 0.5;
// stopping criteria
-int OWLQN_MAX_ITER = 300;
+int OWLQN_MAX_ITER = 300;
const static double MIN_GRAD_NORM = 0.0001;
+Vec approximate_Hg(const int iter, const Vec& grad, const Vec s[],
+ const Vec y[], const double z[]);
-Vec approximate_Hg(const int iter, const Vec & grad,
- const Vec s[], const Vec y[], const double z[]);
-
-
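approximate_Hg is only declared here; its definition is elsewhere in the file and not part of this diff. From its arguments, it is evidently the limited-memory quasi-Newton direction computed from the last M (s, y) pairs. A minimal sketch of the standard L-BFGS two-loop recursion it presumably implements, assuming z[i] caches 1/(y_i . s_i) and the history is a circular buffer of size M (both assumptions):

    // Hedged sketch of the L-BFGS two-loop recursion. Assumptions: z[i]
    // stores 1.0 / dot(y[i], s[i]); history pair k lives at slot k % M.
    #include <algorithm>
    #include <vector>

    static double dot(const std::vector<double>& a,
                      const std::vector<double>& b) {
      double r = 0.0;
      for (size_t i = 0; i < a.size(); i++) r += a[i] * b[i];
      return r;
    }

    std::vector<double> two_loop(int iter, std::vector<double> q,
                                 const std::vector<double> s[],
                                 const std::vector<double> y[],
                                 const double z[], int M) {
      const int bound = std::min(iter, M);
      std::vector<double> alpha(bound);
      // First loop: walk the history from the newest pair to the oldest.
      for (int k = iter - 1, j = 0; j < bound; k--, j++) {
        const int i = k % M;
        alpha[j] = z[i] * dot(s[i], q);
        for (size_t d = 0; d < q.size(); d++) q[d] -= alpha[j] * y[i][d];
      }
      // Implicit initial Hessian H0 = gamma * I with gamma = (s.y)/(y.y).
      if (iter > 0) {
        const int i = (iter - 1) % M;
        const double g = dot(s[i], y[i]) / dot(y[i], y[i]);
        for (size_t d = 0; d < q.size(); d++) q[d] *= g;
      }
      // Second loop: walk back from the oldest pair to the newest.
      for (int k = iter - bound, j = bound - 1; j >= 0; k++, j--) {
        const int i = k % M;
        const double beta = z[i] * dot(y[i], q);
        for (size_t d = 0; d < q.size(); d++)
          q[d] += (alpha[j] - beta) * s[i][d];
      }
      return q;  // approximates (inverse Hessian) * grad
    }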
-inline int sign(double x)
-{
+inline int sign(double x) {
if (x > 0) return 1;
if (x < 0) return -1;
return 0;
};
-static Vec
-pseudo_gradient(const Vec & x, const Vec & grad0, const double C)
-{
+static Vec pseudo_gradient(const Vec& x, const Vec& grad0, const double C) {
Vec grad = grad0;
for (size_t i = 0; i < x.Size(); i++) {
if (x[i] != 0) {
@@ -53,9 +48,8 @@ pseudo_gradient(const Vec & x, const Vec & grad0, const double C)
return grad;
}
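The L1 term makes the objective non-differentiable wherever x[i] == 0, so OWL-QN steers by a pseudo-gradient: the visible lines add C * sign(x[i]) for nonzero coordinates, while the branch for x[i] == 0 falls in the part of this hunk the diff elides. A minimal sketch of that elided branch, assuming it follows the standard definition from Andrew & Gao (2007):

    // Hedged reconstruction of the x[i] == 0 branch elided from this hunk.
    // At zero, the pseudo-gradient is the one-sided derivative that still
    // permits descent, or 0 when neither direction helps.
    double pseudo_grad_at_zero(double g, double C) {
      if (g + C < 0) return g + C;  // right derivative negative: move right
      if (g - C > 0) return g - C;  // left derivative positive: move left
      return 0.0;                   // 0 is in the subdifferential: stay put
    }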
-double
-ME_Model::regularized_func_grad(const double C, const Vec & x, Vec & grad)
-{
+double ME_Model::regularized_func_grad(const double C, const Vec& x,
+ Vec& grad) {
double f = FunctionGradient(x.STLVec(), grad.STLVec());
for (size_t i = 0; i < x.Size(); i++) {
f += C * fabs(x[i]);
@@ -64,11 +58,9 @@ ME_Model::regularized_func_grad(const double C, const Vec & x, Vec & grad)
return f;
}
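In plain terms, regularized_func_grad evaluates the L1-penalized objective that the optimizer minimizes (the driver below prints its negation as obj):

    \[ f_{\mathrm{reg}}(x) = f(x) + C \sum_i |x_i| \]

Only the smooth part appears to contribute to grad here; the non-smooth |x_i| terms are handled by pseudo_gradient above.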
-double
-ME_Model::constrained_line_search(double C,
- const Vec & x0, const Vec & grad0, const double f0,
- const Vec & dx, Vec & x, Vec & grad1)
-{
+double ME_Model::constrained_line_search(double C, const Vec& x0,
+ const Vec& grad0, const double f0,
+ const Vec& dx, Vec& x, Vec& grad1) {
// compute the orthant to explore
Vec orthant = x0;
for (size_t i = 0; i < orthant.Size(); i++) {
@@ -93,9 +85,8 @@ ME_Model::constrained_line_search(double C,
return f;
}
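Only the orthant seeding is visible in this hunk. By the standard OWL-QN recipe, the elided loop backtracks by shrinking the step with LINE_SEARCH_BETA, projects each trial point onto the chosen orthant, and accepts on a sufficient-decrease test governed by LINE_SEARCH_ALPHA. A minimal sketch of the projection step, under those assumptions:

    // Hedged sketch of the orthant projection assumed inside the elided
    // loop: any coordinate whose sign disagrees with the chosen orthant
    // is clipped to zero, so the step never crosses a kink of the L1 term.
    #include <vector>
    static int sgn(double v) { return (v > 0) - (v < 0); }
    void project_onto_orthant(std::vector<double>& x,
                              const std::vector<double>& orthant) {
      for (size_t i = 0; i < x.size(); i++)
        if (sgn(x[i]) != sgn(orthant[i])) x[i] = 0.0;
    }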
-vector<double>
-ME_Model::perform_OWLQN(const vector<double> & x0, const double C)
-{
+vector<double> ME_Model::perform_OWLQN(const vector<double>& x0,
+ const double C) {
const size_t dim = x0.size();
Vec x = x0;
@@ -108,18 +99,18 @@ ME_Model::perform_OWLQN(const vector<double> & x0, const double C)
for (int iter = 0; iter < OWLQN_MAX_ITER; iter++) {
Vec pg = pseudo_gradient(x, grad, C);
- fprintf(stderr, "%3d obj(err) = %f (%6.4f)", iter+1, -f, _train_error);
+ fprintf(stderr, "%3d obj(err) = %f (%6.4f)", iter + 1, -f, _train_error);
if (_nheldout > 0) {
const double heldout_logl = heldout_likelihood();
- fprintf(stderr, " heldout_logl(err) = %f (%6.4f)", heldout_logl, _heldout_error);
+ fprintf(stderr, " heldout_logl(err) = %f (%6.4f)", heldout_logl,
+ _heldout_error);
}
fprintf(stderr, "\n");
if (sqrt(dot_product(pg, pg)) < MIN_GRAD_NORM) break;
dx = -1 * approximate_Hg(iter, pg, s, y, z);
- if (dot_product(dx, pg) >= 0)
- dx.Project(-1 * pg);
+ if (dot_product(dx, pg) >= 0) dx.Project(-1 * pg);
Vec x1(dim), grad1(dim);
f = constrained_line_search(C, x, pg, f, dx, x1, grad1);
@@ -134,4 +125,3 @@ ME_Model::perform_OWLQN(const vector<double> & x0, const double C)
return x.STLVec();
}
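Taken together, the driver loop: compute the pseudo-gradient; stop once sqrt(dot_product(pg, pg)) < MIN_GRAD_NORM or after OWLQN_MAX_ITER iterations; build a quasi-Newton direction with approximate_Hg, falling back to the projected steepest-descent direction whenever dx fails the descent test (dot_product(dx, pg) >= 0); then line-search inside the orthant and update the (s, y, z) history. A hypothetical call site (ME_Model's public training API is not shown in this diff; dim, c, and model are illustrative names):

    // Hypothetical usage sketch, assuming caller access to perform_OWLQN.
    std::vector<double> w0(dim, 0.0);      // start from zero weights
    const double c = 1.0;                  // L1 strength (illustrative value)
    std::vector<double> w = model.perform_OWLQN(w0, c);
    // The L1 penalty drives many coordinates of w exactly to zero,
    // yielding a sparse model.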
-