author     trevor.cohn <trevor.cohn@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-10 16:49:23 +0000
committer  trevor.cohn <trevor.cohn@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-10 16:49:23 +0000
commit     bd120bac920142a659b0bfa0b9157cb6c547e423 (patch)
tree       07144935a9a28fdea2719464feaf49368abf97ad /gi/posterior-regularisation
parent     8eaa95d98b4ad1ce86ff87dd693b417c8dfdef6d (diff)
Testing code for line search with simplex *equality* constraints and positivity constraints.
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@217 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'gi/posterior-regularisation')
-rw-r--r--  gi/posterior-regularisation/linesearch.py  58
-rw-r--r--  gi/posterior-regularisation/simplex_pg.py  55
2 files changed, 113 insertions, 0 deletions
diff --git a/gi/posterior-regularisation/linesearch.py b/gi/posterior-regularisation/linesearch.py
new file mode 100644
index 00000000..5a3f2e9c
--- /dev/null
+++ b/gi/posterior-regularisation/linesearch.py
@@ -0,0 +1,58 @@
+## Automatically adapted for scipy Oct 07, 2005 by convertcode.py
+
+from scipy.optimize import minpack2
+import numpy
+
+import __builtin__
+pymin = __builtin__.min
+
+def line_search(f, myfprime, xk, pk, gfk, old_fval, old_old_fval,
+                args=(), c1=1e-4, c2=0.9, amax=50):
+
+    fc = 0
+    gc = 0
+    phi0 = old_fval
+    derphi0 = numpy.dot(gfk,pk)
+    alpha1 = pymin(1.0,1.01*2*(phi0-old_old_fval)/derphi0)
+    # trevor: added this test
+    alpha1 = pymin(alpha1,amax)
+
+    if isinstance(myfprime,type(())):
+        eps = myfprime[1]
+        fprime = myfprime[0]
+        newargs = (f,eps) + args
+        gradient = False
+    else:
+        fprime = myfprime
+        newargs = args
+        gradient = True
+
+    xtol = 1e-14
+    amin = 1e-8
+    isave = numpy.zeros((2,), numpy.intc)
+    dsave = numpy.zeros((13,), float)
+    task = 'START'
+    fval = old_fval
+    gval = gfk
+
+    while 1:
+        stp,fval,derphi,task = minpack2.dcsrch(alpha1, phi0, derphi0, c1, c2,
+                                               xtol, task, amin, amax, isave, dsave)
+        #print 'minpack2.dcsrch', alpha1, phi0, derphi0, c1, c2, xtol, task, amin, amax, isave, dsave
+        #print 'returns', stp, fval, derphi, task
+
+        if task[:2] == 'FG':
+            alpha1 = stp
+            fval = f(xk+stp*pk,*args)
+            fc += 1
+            gval = fprime(xk+stp*pk,*newargs)
+            if gradient: gc += 1
+            else: fc += len(xk) + 1
+            phi0 = fval
+            derphi0 = numpy.dot(gval,pk)
+        else:
+            break
+
+    if task[:5] == 'ERROR' or task[1:4] == 'WARN':
+        stp = None  # failed
+    return stp, fc, gc, fval, old_fval, gval
diff --git a/gi/posterior-regularisation/simplex_pg.py b/gi/posterior-regularisation/simplex_pg.py
new file mode 100644
index 00000000..5da796d3
--- /dev/null
+++ b/gi/posterior-regularisation/simplex_pg.py
@@ -0,0 +1,55 @@
+#
+# Following Luenberger and Ye, Linear and Nonlinear Programming, 3rd ed., p. 367,
+# "The gradient projection method"
+# applied to an equality constraint for a simplex.
+#
+#   min f(x)
+#   s.t. x >= 0, sum_i x_i = d
+#
+# FIXME: enforce the positivity constraint - a limit on the line search?
+#
+
+from numpy import *
+from scipy import *
+from linesearch import line_search
+# local copy of scipy's Armijo line_search - wasn't enforcing alpha max correctly
+import sys
+
+dims = 4
+
+def f(x):
+    fv = x[0]*x[0] + x[1]*x[1] + x[2]*x[2] + x[3]*x[3] - 2*x[0] - 3*x[3]
+    # print 'evaluating f at', x, 'value', fv
+    return fv
+
+def g(x):
+    return array([2*x[0] - 2, 2*x[1], 2*x[2], 2*x[3] - 3])
+
+def pg(x):
+    gv = g(x)
+    return gv - sum(gv) / dims
+
+x = ones(dims) / dims
+old_fval = None
+
+while True:
+    fv = f(x)
+    gv = g(x)
+    dv = pg(x)
+
+    print 'x', x, 'f', fv, 'g', gv, 'd', dv
+
+    if old_fval == None:
+        old_fval = fv + 0.1
+
+    # solve for maximum step size i.e. when positivity constraints kick in
+    # x - alpha d = 0 => alpha = x/d
+    amax = max(x/dv)
+    if amax < 1e-8: break
+
+    stuff = line_search(f, pg, x, -dv, dv, fv, old_fval, amax=amax)
+    alpha = stuff[0]  # Nb. can avoid next evaluation of f,g,d using 'stuff'
+    if alpha < 1e-8: break
+    x -= alpha * dv
+
+    old_fval = fv
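A note on the projection in pg(): this is the gradient projection method named in the header comment. For the single equality constraint sum_i x_i = d the constraint normal is the all-ones vector a, so the projector onto the feasible directions {z : sum_i z_i = 0} is P = I - a a^T / n, and P g = g - mean(g) * 1 -- exactly what gv - sum(gv) / dims computes. A quick check of that identity (a hypothetical snippet, not part of the commit; Python 2 to match the scripts):

    from numpy import ones, eye, outer, dot, array, allclose

    n = 4
    a = ones(n)
    P = eye(n) - outer(a, a) / n                         # projector onto {z : sum_i z_i = 0}
    g = array([2*0.25 - 2, 2*0.25, 2*0.25, 2*0.25 - 3])  # gradient at the start point ones(4)/4
    d = dot(P, g)
    print allclose(d, g - g.sum() / n)                   # True: matches pg()
    print abs(d.sum()) < 1e-12                           # True: steps keep sum_i x_i fixed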
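On the FIXME: the update is x -= alpha * dv, so coordinate i reaches zero at alpha = x_i / dv_i, and only coordinates with dv_i > 0 shrink along the step. The binding limit is therefore the smallest positive ratio; amax = max(x/dv) as written can overshoot it, and divides by zero when some dv_i = 0. A minimal sketch of the tighter bound, assuming the same update rule (max_feasible_step is a hypothetical helper, not part of the commit):

    import numpy

    def max_feasible_step(x, d, tol=1e-12):
        # largest alpha keeping x - alpha*d >= 0 elementwise: only
        # coordinates with d_i > 0 shrink, and x_i - alpha*d_i = 0 at
        # alpha = x_i / d_i, so take the smallest such ratio
        shrinking = d > tol
        if not shrinking.any():
            return numpy.inf    # no positivity constraint ever binds
        return (x[shrinking] / d[shrinking]).min()

    x = numpy.ones(4) / 4
    d = numpy.array([0.5, -0.25, -0.25, 0.0])
    print max_feasible_step(x, d)    # 0.5, where x[0] hits zero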
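The test problem also has a closed-form answer to check the printout against: minimising f(x) = sum_i x_i^2 - 2 x_0 - 3 x_3 over the unit simplex (the start point ones(4)/4 fixes d = 1), the KKT conditions put the support on coordinates 0 and 3 with multiplier lambda = 3/2, giving x* = (1/4, 0, 0, 3/4) and f(x*) = -17/8 = -2.125. A numeric spot-check using the script's own f and g definitions (again a hypothetical snippet, not part of the commit):

    from numpy import array, dot

    xstar = array([0.25, 0.0, 0.0, 0.75])
    fstar = dot(xstar, xstar) - 2*xstar[0] - 3*xstar[3]
    gstar = array([2*xstar[0] - 2, 2*xstar[1], 2*xstar[2], 2*xstar[3] - 3])
    print fstar    # -2.125
    print gstar    # [-1.5  0.  0. -1.5]: constant (= -lambda) on the support,
                   # and no smaller off it, so no feasible descent direction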