author    desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-09 16:59:55 +0000
committer desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f>  2010-07-09 16:59:55 +0000
commit    7f69c868c41e4b36eecf9d3b1dc22f3f3aa1540c (patch)
tree      d22aa7b6f47248ed6da02b77a0680b6b83e67b63 /gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java
parent    4e37402323c3227e90a89345387834e149732b5c (diff)
add optimization library source code
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@204 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java')
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java  141
1 files changed, 141 insertions, 0 deletions
diff --git a/gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java b/gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java
new file mode 100644
index 00000000..e153f2da
--- /dev/null
+++ b/gi/posterior-regularisation/prjava/src/optimization/linesearch/ArmijoLineSearchMinimizationAlongProjectionArc.java
@@ -0,0 +1,141 @@
+package optimization.linesearch;
+
+import optimization.gradientBasedMethods.ProjectedObjective;
+import optimization.util.Interpolation;
+import optimization.util.MathUtils;
+
+
+
+
+
+/**
+ * Implements the Armijo rule line search along the projection arc (Bertsekas, Nonlinear Programming, page 230).
+ * To be used with projected gradient methods.
+ * 
+ * Recall that the Armijo rule tries successive step sizes alpha until the sufficient decrease condition is satisfied:
+ * f(x + alpha*direction) <= f(x) + c1*alpha*grad(f)'*direction
+ * 
+ * In this case we are optimizing over a convex set X, so we must guarantee that the new point stays inside
+ * the constraints.
+ * First, the direction has to be feasible (inside the constraints) and is defined as:
+ * d = (x_k_f - x_k), where x_k_f is a feasible point.
+ * So the Armijo condition can be rewritten as:
+ * f(x_k + alpha*(x_k_f - x_k)) <= f(x_k) + c1*grad(f)'*(x_k_f - x_k)
+ * where x_k_f is defined as:
+ * x_k_f = [x_k - alpha*grad(f)]+
+ * and []+ denotes the projection onto the feasible set.
+ * In other words, we take a step along the negative gradient (gradient descent) and then project
+ * the resulting point back onto the feasible set.
+ * Note that if the point is already feasible we are back to the normal Armijo rule.
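+ * 
+ * Illustrative example (numbers are made up): with the constraint set X = {x : x >= 0}, x_k = 0.5,
+ * grad(f)(x_k) = 2 and alpha = 1, the gradient step gives x_k - alpha*grad(f) = -1.5, which is projected
+ * back to x_k_f = [x_k - alpha*grad(f)]+ = 0, so the search direction is d = x_k_f - x_k = -0.5.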
+ *
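+ * A hypothetical usage sketch (assuming GenericPickFirstStep is constructed with a constant first step
+ * and o is an already constructed DifferentiableLineSearchObjective):
+ * 
+ *   LineSearchMethod ls = new ArmijoLineSearchMinimizationAlongProjectionArc(new GenericPickFirstStep(1));
+ *   double step = ls.getStepSize(o); // returns -1 when no step satisfying sufficient decrease is found
+ * 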
+ * @author javg
+ *
+ */
+public class ArmijoLineSearchMinimizationAlongProjectionArc implements LineSearchMethod{
+
+ /**
+	 * Factor by which the step size is contracted at each backtracking iteration.
+ */
+ double contractionFactor = 0.5;
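+	/** Sufficient decrease constant c1 of the Armijo condition. */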
+ double c1 = 0.0001;
+
+
+ double initialStep;
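+	/** Maximum number of backtracking iterations before giving up and returning -1. */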
+ int maxIterations = 100;
+
+
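+	//Safeguard interval for accepting the quadratically interpolated step (see getStepSize):
+	//the interpolated step is used only if it lies in [sigma1*alpha, sigma2*alpha].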
+ double sigma1 = 0.1;
+ double sigma2 = 0.9;
+
+ //Experiment
+	double previousStepPicked = -1;
+ double previousInitGradientDot = -1;
+ double currentInitGradientDot = -1;
+
+ GenericPickFirstStep strategy;
+
+
+ public void reset(){
+		previousStepPicked = -1;
+ previousInitGradientDot = -1;
+ currentInitGradientDot = -1;
+ }
+
+
+ public ArmijoLineSearchMinimizationAlongProjectionArc(){
+ this.initialStep = 1;
+ }
+
+ public ArmijoLineSearchMinimizationAlongProjectionArc(GenericPickFirstStep strategy){
+ this.strategy = strategy;
+ this.initialStep = strategy.getFirstStep(this);
+ }
+
+
+ public void setInitialStep(double initial){
+ this.initialStep = initial;
+ }
+
+	/**
+	 * Computes a step size along the projection arc that satisfies the Armijo sufficient decrease condition.
+	 * Returns -1 if no such step is found within maxIterations.
+	 */
+ public double getStepSize(DifferentiableLineSearchObjective o) {
+
+
+		//Pick the first step size (from the strategy, when one was supplied) and update the objective at that point.
+		if(strategy != null){
+			initialStep = strategy.getFirstStep(this);
+		}
+		o.updateAlpha(initialStep);
+		previousInitGradientDot = currentInitGradientDot;
+		currentInitGradientDot = o.getCurrentGradient();
+		int nrIterations = 0;
+
+		//Armijo rule: the current value has to be smaller than the original value plus a fraction (c1) of the directional derivative
+ while(o.getCurrentValue() >
+ o.getOriginalValue() + c1*(o.getCurrentGradient())){
+ if(nrIterations >= maxIterations){
+				System.out.println("Could not find a step satisfying sufficient decrease; leaving line search with -1");
+ o.printLineSearchSteps();
+ return -1;
+ }
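+			//Propose a new step by quadratic interpolation of the line search function; if the proposal
+			//falls outside the safeguard interval, simply contract the current step instead.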
+			double alpha = o.getAlpha();
+			double alphaTemp =
+				Interpolation.quadraticInterpolation(o.getOriginalValue(), o.getInitialGradient(), alpha, o.getCurrentValue());
+			if(alphaTemp >= sigma1*alpha && alphaTemp <= sigma2*alpha){
+				alpha = alphaTemp;
+			}else{
+				alpha = alpha*contractionFactor;
+			}
+			o.updateAlpha(alpha);
+ nrIterations++;
+ }
+
+ previousStepPicked = o.getAlpha();
+ return o.getAlpha();
+ }
+
+	public double getInitialGradient() {
+		return currentInitGradientDot;
+	}
+
+ public double getPreviousInitialGradient() {
+ return previousInitGradientDot;
+ }
+
+ public double getPreviousStepUsed() {
+ return previousStepPicked;
+ }
+
+}