author     trevor.cohn <trevor.cohn@ec762483-ff6d-05da-a07a-a48fb63a330f>   2010-07-10 16:49:23 +0000
committer  trevor.cohn <trevor.cohn@ec762483-ff6d-05da-a07a-a48fb63a330f>   2010-07-10 16:49:23 +0000
commit     734cdcebfe66ee283e7ae535cec13a857f8b4a29 (patch)
tree       50ddeb1f9334de1004c4c1f08c93a50201d666d1
parent     3789855a42083b2fb4bc7562dcc619583dbe81e6 (diff)
Testing code for line search with simplex *equality* constraints and positivity constraints.
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@217 ec762483-ff6d-05da-a07a-a48fb63a330f
-rw-r--r--  gi/posterior-regularisation/linesearch.py   58
-rw-r--r--  gi/posterior-regularisation/simplex_pg.py   55
2 files changed, 113 insertions, 0 deletions
diff --git a/gi/posterior-regularisation/linesearch.py b/gi/posterior-regularisation/linesearch.py
new file mode 100644
index 00000000..5a3f2e9c
--- /dev/null
+++ b/gi/posterior-regularisation/linesearch.py
@@ -0,0 +1,58 @@
+## Automatically adapted for scipy Oct 07, 2005 by convertcode.py
+
+from scipy.optimize import minpack2
+import numpy
+
+import __builtin__
+pymin = __builtin__.min
+
+def line_search(f, myfprime, xk, pk, gfk, old_fval, old_old_fval,
+                args=(), c1=1e-4, c2=0.9, amax=50):
+
+    fc = 0
+    gc = 0
+    phi0 = old_fval
+    derphi0 = numpy.dot(gfk,pk)
+    alpha1 = pymin(1.0,1.01*2*(phi0-old_old_fval)/derphi0)
+    # trevor: added this test
+    alpha1 = pymin(alpha1,amax)
+
+    if isinstance(myfprime,type(())):
+        eps = myfprime[1]
+        fprime = myfprime[0]
+        newargs = (f,eps) + args
+        gradient = False
+    else:
+        fprime = myfprime
+        newargs = args
+        gradient = True
+
+    xtol = 1e-14
+    amin = 1e-8
+    isave = numpy.zeros((2,), numpy.intc)
+    dsave = numpy.zeros((13,), float)
+    task = 'START'   # reverse-communication flag for dcsrch
+    fval = old_fval
+    gval = gfk
+
+    while 1:   # call dcsrch repeatedly until it accepts a step or signals an error/warning
+        stp,fval,derphi,task = minpack2.dcsrch(alpha1, phi0, derphi0, c1, c2,
+                                               xtol, task, amin, amax,isave,dsave)
+        #print 'minpack2.dcsrch', alpha1, phi0, derphi0, c1, c2, xtol, task, amin, amax,isave,dsave
+        #print 'returns', stp,fval,derphi,task
+
+        if task[:2] == 'FG':
+            alpha1 = stp
+            fval = f(xk+stp*pk,*args)
+            fc += 1
+            gval = fprime(xk+stp*pk,*newargs)
+            if gradient: gc += 1
+            else: fc += len(xk) + 1
+            phi0 = fval
+            derphi0 = numpy.dot(gval,pk)
+        else:
+            break
+
+    if task[:5] == 'ERROR' or task[:4] == 'WARN':
+        stp = None  # failed
+    return stp, fc, gc, fval, old_fval, gval
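
For context, a minimal usage sketch of the patched line_search above. It is not part of the commit: the quadratic objective, its gradient, the starting point and the crude old_old_fval estimate are illustrative choices of mine, and it assumes a Python 2 environment with a period SciPy in which scipy.optimize.minpack2 imports.

    import numpy
    from linesearch import line_search

    def quad(x):
        return numpy.dot(x, x)              # simple convex objective
    def quad_grad(x):
        return 2 * x                        # its gradient

    xk = numpy.array([1.0, -2.0])           # current iterate
    gfk = quad_grad(xk)                     # gradient at xk
    pk = -gfk                               # steepest-descent direction
    old_fval = quad(xk)
    old_old_fval = old_fval + numpy.dot(gfk, gfk)   # rough previous value, only used to guess the first trial step

    stp, fc, gc, fval, old_fval, gval = line_search(
        quad, quad_grad, xk, pk, gfk, old_fval, old_old_fval, amax=1.0)
    print 'step', stp, 'f(xk + step*pk)', fval      # stp is None if dcsrch reports an error or warning

simplex_pg.py below consumes the same return tuple, taking stuff[0] as the accepted step length.
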
diff --git a/gi/posterior-regularisation/simplex_pg.py b/gi/posterior-regularisation/simplex_pg.py
new file mode 100644
index 00000000..5da796d3
--- /dev/null
+++ b/gi/posterior-regularisation/simplex_pg.py
@@ -0,0 +1,55 @@
+#
+# Following Luenberger and Ye, Linear and Nonlinear Programming, 3rd ed., p. 367
+# "The gradient projection method"
+# applied to an equality constraint for a simplex.
+#
+# min f(x)
+# s.t. x >= 0, sum_i x = d
+#
+# FIXME: enforce the positivity constraint - a limit on the line search?
+#
+
+from numpy import *
+from scipy import *
+from linesearch import line_search
+# local copy of scipy's Armijo line_search - wasn't enforcing alpha max correctly
+import sys
+
+dims = 4
+
+def f(x):
+    fv = x[0]*x[0] + x[1]*x[1] + x[2]*x[2] + x[3]*x[3] - 2*x[0] - 3*x[3]
+    # print 'evaluating f at', x, 'value', fv
+    return fv
+
+def g(x):
+    return array([2*x[0] - 2, 2*x[1], 2*x[2], 2*x[3]-3])
+
+def pg(x):
+    gv = g(x)
+    return gv - sum(gv) / dims   # subtract the mean: project g onto {d : sum_i d_i = 0}
+
+x = ones(dims) / dims   # start at the centre of the simplex
+old_fval = None
+
+while True:
+    fv = f(x)
+    gv = g(x)
+    dv = pg(x)
+
+    print 'x', x, 'f', fv, 'g', gv, 'd', dv
+
+    if old_fval is None:
+        old_fval = fv + 0.1
+
+    # solve for maximum step size i.e. when positivity constraints kick in
+    # x - alpha d = 0 => alpha = x/d
+    amax = max(x/dv)
+    if amax < 1e-8: break
+
+    stuff = line_search(f, pg, x, -dv, dv, fv, old_fval, amax=amax)
+    alpha = stuff[0] # Nb. can avoid next evaluation of f,g,d using 'stuff'
+    if alpha < 1e-8: break
+    x -= alpha * dv
+
+    old_fval = fv
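
For reference, the direction returned by pg() is the gradient minus its mean, i.e. the Euclidean projection of g(x) onto the tangent space {d : sum_i d_i = 0} of the equality constraint, and the script's initialisation implies d = 1 (x starts on the unit simplex and the zero-sum steps preserve it). By my own working (not stated in the commit), the toy objective's constrained optimum is x* = (0.25, 0, 0, 0.75) with f(x*) = -2.125. A small cross-check sketch, not part of the commit and assuming SciPy's fmin_slsqp wrapper is available (the names x0 and xstar are mine), solves the same problem directly so the loop's output has something to be compared against:

    from numpy import array, ones
    from scipy.optimize import fmin_slsqp

    def f(x):
        return x[0]*x[0] + x[1]*x[1] + x[2]*x[2] + x[3]*x[3] - 2*x[0] - 3*x[3]
    def g(x):
        return array([2*x[0] - 2, 2*x[1], 2*x[2], 2*x[3] - 3])

    x0 = ones(4) / 4                                      # start on the simplex, as above
    xstar = fmin_slsqp(f, x0, fprime=g,
                       eqcons=[lambda x: x.sum() - 1.0],  # simplex equality constraint sum_i x_i = 1
                       bounds=[(0.0, 1.0)] * 4,           # positivity (the upper bound is implied on the simplex)
                       iprint=0)
    print 'SLSQP solution', xstar                         # expect roughly [0.25, 0, 0, 0.75]
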