Diffstat (limited to 'gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods')
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/AbstractGradientBaseMethod.java          | 120
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ConjugateGradient.java                   |  92
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/DebugHelpers.java                        |  65
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/GradientDescent.java                     |  19
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/LBFGS.java                               | 234
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Objective.java                           |  87
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Optimizer.java                           |  19
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedAbstractGradientBaseMethod.java |  11
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedGradientDescent.java            | 154
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedObjective.java                  |  29
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedOptimizer.java                  |  10
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/OptimizerStats.java                |  86
-rw-r--r--  gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/ProjectedOptimizerStats.java       |  70
13 files changed, 0 insertions, 996 deletions
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/AbstractGradientBaseMethod.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/AbstractGradientBaseMethod.java
deleted file mode 100644
index 2fcb7990..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/AbstractGradientBaseMethod.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import optimization.gradientBasedMethods.stats.OptimizerStats;
-import optimization.linesearch.DifferentiableLineSearchObjective;
-import optimization.linesearch.LineSearchMethod;
-import optimization.stopCriteria.StopingCriteria;
-import optimization.util.MathUtils;
-
-/**
- *
- * @author javg
- *
- */
-public abstract class AbstractGradientBaseMethod implements Optimizer{
-
- protected int maxNumberOfIterations=10000;
-
-
-
- protected int currentProjectionIteration;
- protected double currValue;
-	protected double previousValue = Double.MAX_VALUE;
- protected double step;
- protected double[] gradient;
- public double[] direction;
-
- //Original values
- protected double originalGradientL2Norm;
-
- protected LineSearchMethod lineSearch;
- DifferentiableLineSearchObjective lso;
-
-
- public void reset(){
- direction = null;
- gradient = null;
- previousValue = Double.MAX_VALUE;
- currentProjectionIteration = 0;
- originalGradientL2Norm = 0;
- step = 0;
- currValue = 0;
- }
-
- public void initializeStructures(Objective o,OptimizerStats stats, StopingCriteria stop){
- lso = new DifferentiableLineSearchObjective(o);
- }
- public void updateStructuresBeforeStep(Objective o,OptimizerStats stats, StopingCriteria stop){
- }
-
- public void updateStructuresAfterStep(Objective o,OptimizerStats stats, StopingCriteria stop){
- }
-
- public boolean optimize(Objective o,OptimizerStats stats, StopingCriteria stop){
- //Initialize structures
-
- stats.collectInitStats(this, o);
- direction = new double[o.getNumParameters()];
- initializeStructures(o, stats, stop);
- for (currentProjectionIteration = 1; currentProjectionIteration < maxNumberOfIterations; currentProjectionIteration++){
- //System.out.println("\tgradient descent iteration " + currentProjectionIteration);
- //System.out.print("\tparameters:" );
- //o.printParameters();
- previousValue = currValue;
- currValue = o.getValue();
- gradient = o.getGradient();
- if(stop.stopOptimization(o)){
- stats.collectFinalStats(this, o);
- return true;
- }
-
- getDirection();
- if(MathUtils.dotProduct(gradient, direction) > 0){
- System.out.println("Not a descent direction");
- System.out.println(" current stats " + stats.prettyPrint(1));
- System.exit(-1);
- }
- updateStructuresBeforeStep(o, stats, stop);
- lso.reset(direction);
- step = lineSearch.getStepSize(lso);
- //System.out.println("\t\tLeave with step: " + step);
- if(step==-1){
- System.out.println("Failed to find step");
- stats.collectFinalStats(this, o);
- return false;
- }
- updateStructuresAfterStep( o, stats, stop);
-// previousValue = currValue;
-// currValue = o.getValue();
-// gradient = o.getGradient();
- stats.collectIterationStats(this, o);
- }
- stats.collectFinalStats(this, o);
- return false;
- }
-
-
- public int getCurrentIteration() {
- return currentProjectionIteration;
- }
-
-
- /**
- * Method specific
- */
- public abstract double[] getDirection();
-
- public double getCurrentStep() {
- return step;
- }
-
-
-
- public void setMaxIterations(int max) {
- maxNumberOfIterations = max;
- }
-
- public double getCurrentValue() {
- return currValue;
- }
-}
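
The optimize() method above is a template: subclasses supply getDirection(), the LineSearchMethod picks the step, and the loop rejects non-descent directions and retries until the stopping criterion fires. A minimal self-contained sketch of the same skeleton, with a hypothetical quadratic f(x) = ||x - c||^2 and a plain backtracking Armijo search standing in for the package's line-search classes (all names and constants here are illustrative, not part of the package):

    // Standalone sketch of the optimize() skeleton: steepest descent with a
    // backtracking Armijo line search on f(x) = ||x - c||^2.
    public class GradientLoopSketch {
        static final double[] C = {1.0, -2.0};

        static double value(double[] x) {
            double v = 0;
            for (int i = 0; i < x.length; i++) v += (x[i] - C[i]) * (x[i] - C[i]);
            return v;
        }

        static double[] gradient(double[] x) {
            double[] g = new double[x.length];
            for (int i = 0; i < x.length; i++) g[i] = 2 * (x[i] - C[i]);
            return g;
        }

        static double dot(double[] a, double[] b) {
            double s = 0;
            for (int i = 0; i < a.length; i++) s += a[i] * b[i];
            return s;
        }

        public static void main(String[] args) {
            double[] x = {5.0, 5.0};
            for (int iter = 1; iter < 10000; iter++) {
                double[] g = gradient(x);
                if (Math.sqrt(dot(g, g)) < 1e-8) break;          // stopping criterion
                double[] d = new double[g.length];
                for (int i = 0; i < g.length; i++) d[i] = -g[i]; // getDirection()
                if (dot(g, d) > 0) throw new IllegalStateException("Not a descent direction");
                // Backtracking Armijo search: halve the step until sufficient decrease.
                double step = 1.0, f = value(x), slope = dot(g, d);
                while (true) {
                    double[] trial = x.clone();
                    for (int i = 0; i < x.length; i++) trial[i] += step * d[i];
                    if (value(trial) <= f + 1e-4 * step * slope) { x = trial; break; }
                    step *= 0.5;
                    if (step < 1e-12) throw new IllegalStateException("Failed to find step");
                }
            }
            System.out.println("x = " + java.util.Arrays.toString(x)); // ~ (1, -2)
        }
    }
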
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ConjugateGradient.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ConjugateGradient.java
deleted file mode 100644
index 28295729..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ConjugateGradient.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import optimization.gradientBasedMethods.stats.OptimizerStats;
-import optimization.linesearch.DifferentiableLineSearchObjective;
-import optimization.linesearch.LineSearchMethod;
-import optimization.stopCriteria.StopingCriteria;
-import optimization.util.MathUtils;
-
-
-
-public class ConjugateGradient extends AbstractGradientBaseMethod{
-
-
- double[] previousGradient;
- double[] previousDirection;
-
- public ConjugateGradient(LineSearchMethod lineSearch) {
- this.lineSearch = lineSearch;
- }
-
- public void reset(){
- super.reset();
- java.util.Arrays.fill(previousDirection, 0);
- java.util.Arrays.fill(previousGradient, 0);
- }
-
- public void initializeStructures(Objective o,OptimizerStats stats, StopingCriteria stop){
- super.initializeStructures(o, stats, stop);
- previousGradient = new double[o.getNumParameters()];
- previousDirection = new double[o.getNumParameters()];
- }
- public void updateStructuresBeforeStep(Objective o,OptimizerStats stats, StopingCriteria stop){
- System.arraycopy(gradient, 0, previousGradient, 0, gradient.length);
- System.arraycopy(direction, 0, previousDirection, 0, direction.length);
- }
-
-// public boolean optimize(Objective o,OptimizerStats stats, StopingCriteria stop){
-// DifferentiableLineSearchObjective lso = new DifferentiableLineSearchObjective(o);
-// stats.collectInitStats(this, o);
-// direction = new double[o.getNumParameters()];
-// initializeStructures(o, stats, stop);
-// for (currentProjectionIteration = 0; currentProjectionIteration < maxNumberOfIterations; currentProjectionIteration++){
-// previousValue = currValue;
-// currValue = o.getValue();
-// gradient =o.getGradient();
-// if(stop.stopOptimization(gradient)){
-// stats.collectFinalStats(this, o);
-// return true;
-// }
-// getDirection();
-// updateStructures(o, stats, stop);
-// lso.reset(direction);
-// step = lineSearch.getStepSize(lso);
-// if(step==-1){
-// System.out.println("Failed to find a step size");
-// System.out.println("Failed to find step");
-// stats.collectFinalStats(this, o);
-// return false;
-// }
-//
-// stats.collectIterationStats(this, o);
-// }
-// stats.collectFinalStats(this, o);
-// return false;
-// }
-
- public double[] getDirection(){
- direction = MathUtils.negation(gradient);
- if(currentProjectionIteration != 1){
- //Using Polak-Ribiere method (book equation 5.45)
- double b = MathUtils.dotProduct(gradient, MathUtils.arrayMinus(gradient, previousGradient))
- /MathUtils.dotProduct(previousGradient, previousGradient);
- if(b<0){
- System.out.println("Defaulting to gradient descent");
- b = Math.max(b, 0);
- }
- MathUtils.plusEquals(direction, previousDirection, b);
- //Debug code
- if(MathUtils.dotProduct(direction, gradient) > 0){
-				System.out.println("Not a descent direction, resetting to gradient");
- direction = MathUtils.negation(gradient);
- }
- }
- return direction;
- }
-
-
-
-
-
-
-}
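
For reference, getDirection() above implements the Polak-Ribiere update beta_k = g_k^T (g_k - g_{k-1}) / (g_{k-1}^T g_{k-1}), clamped at zero (the "PR+" variant), with a reset to steepest descent whenever the resulting direction is not a descent direction. A self-contained sketch of just that update, with hypothetical array inputs:

    // Sketch of the Polak-Ribiere+ direction update used in getDirection():
    // beta = g_k . (g_k - g_{k-1}) / (g_{k-1} . g_{k-1}), clamped at 0,
    // then d_k = -g_k + beta * d_{k-1}.
    public class PolakRibiereSketch {
        static double dot(double[] a, double[] b) {
            double s = 0;
            for (int i = 0; i < a.length; i++) s += a[i] * b[i];
            return s;
        }

        static double[] direction(double[] g, double[] prevG, double[] prevD) {
            double[] diff = new double[g.length];
            for (int i = 0; i < g.length; i++) diff[i] = g[i] - prevG[i];
            // PR+: a negative beta falls back to plain steepest descent.
            double beta = Math.max(0, dot(g, diff) / dot(prevG, prevG));
            double[] d = new double[g.length];
            for (int i = 0; i < g.length; i++) d[i] = -g[i] + beta * prevD[i];
            if (dot(d, g) > 0) {                 // safeguard: reset to -gradient
                for (int i = 0; i < g.length; i++) d[i] = -g[i];
            }
            return d;
        }

        public static void main(String[] args) {
            double[] g = {1.0, -0.5}, prevG = {2.0, 0.0}, prevD = {-2.0, 0.0};
            System.out.println(java.util.Arrays.toString(direction(g, prevG, prevD)));
        }
    }
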
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/DebugHelpers.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/DebugHelpers.java
deleted file mode 100644
index 6dc4ef6c..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/DebugHelpers.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import java.util.ArrayList;
-
-import optimization.util.MathUtils;
-
-
-
-public class DebugHelpers {
- public static void getLineSearchGraph(Objective o, double[] direction,
- double[] parameters, double originalObj,
- double originalDot, double c1, double c2){
- ArrayList<Double> stepS = new ArrayList<Double>();
- ArrayList<Double> obj = new ArrayList<Double>();
- ArrayList<Double> norm = new ArrayList<Double>();
- double[] gradient = new double[o.getNumParameters()];
- double[] newParameters = parameters.clone();
- MathUtils.plusEquals(newParameters,direction,0);
- o.setParameters(newParameters);
- double minValue = o.getValue();
- int valuesBiggerThanMax = 0;
- for(double step = 0; step < 2; step +=0.01 ){
- newParameters = parameters.clone();
- MathUtils.plusEquals(newParameters,direction,step);
- o.setParameters(newParameters);
- double newValue = o.getValue();
- gradient = o.getGradient();
- double newgradDirectionDot = MathUtils.dotProduct(gradient,direction);
- stepS.add(step);
- obj.add(newValue);
- norm.add(newgradDirectionDot);
- if(newValue <= minValue){
- minValue = newValue;
- }else{
- valuesBiggerThanMax++;
- }
-
- if(valuesBiggerThanMax > 10){
- break;
- }
-
- }
- System.out.println("step\torigObj\tobj\tsuffdec\tnorm\tcurvature1");
- for(int i = 0; i < stepS.size(); i++){
- double cnorm= norm.get(i);
- System.out.println(stepS.get(i)+"\t"+originalObj +"\t"+obj.get(i) + "\t" +
- (originalObj + originalDot*((Double)stepS.get(i))*c1) +"\t"+Math.abs(cnorm) +"\t"+c2*Math.abs(originalDot));
- }
- }
-
- public static double[] getNumericalGradient(Objective o, double[] parameters, double epsilon){
- int nrParameters = o.getNumParameters();
- double[] gradient = new double[nrParameters];
- double[] newParameters;
- double originalValue = o.getValue();
- for(int parameter = 0; parameter < nrParameters; parameter++){
- newParameters = parameters.clone();
- newParameters[parameter]+=epsilon;
- o.setParameters(newParameters);
- double newValue = o.getValue();
- gradient[parameter]=(newValue-originalValue)/epsilon;
- }
- return gradient;
- }
-}
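
getNumericalGradient above uses a forward difference, whose error is O(epsilon). When checking analytic gradients it is often worth the extra function evaluations to use a central difference, which is O(epsilon^2). A standalone sketch against a plain functional interface (the interface is a stand-in assumption, not the package's Objective class):

    import java.util.function.ToDoubleFunction;

    // Central-difference numerical gradient: O(eps^2) error versus O(eps)
    // for the forward difference above.
    public class CentralDifference {
        static double[] numericalGradient(ToDoubleFunction<double[]> f, double[] x, double eps) {
            double[] grad = new double[x.length];
            for (int i = 0; i < x.length; i++) {
                double[] plus = x.clone(), minus = x.clone();
                plus[i] += eps;
                minus[i] -= eps;
                grad[i] = (f.applyAsDouble(plus) - f.applyAsDouble(minus)) / (2 * eps);
            }
            return grad;
        }

        public static void main(String[] args) {
            // f(x) = x0^2 + 3*x0*x1, so df/dx0 = 2*x0 + 3*x1 and df/dx1 = 3*x0.
            ToDoubleFunction<double[]> f = x -> x[0] * x[0] + 3 * x[0] * x[1];
            double[] g = numericalGradient(f, new double[]{1.0, 2.0}, 1e-5);
            System.out.println(java.util.Arrays.toString(g)); // ~ [8.0, 3.0]
        }
    }
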
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/GradientDescent.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/GradientDescent.java
deleted file mode 100644
index 9a53cef4..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/GradientDescent.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import optimization.linesearch.LineSearchMethod;
-
-
-
-public class GradientDescent extends AbstractGradientBaseMethod{
-
- public GradientDescent(LineSearchMethod lineSearch) {
- this.lineSearch = lineSearch;
- }
-
- public double[] getDirection(){
- for(int i = 0; i< gradient.length; i++){
- direction[i] = -gradient[i];
- }
- return direction;
- }
-}
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/LBFGS.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/LBFGS.java
deleted file mode 100644
index dedbc942..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/LBFGS.java
+++ /dev/null
@@ -1,234 +0,0 @@
-package optimization.gradientBasedMethods;
-
-
-import optimization.gradientBasedMethods.stats.OptimizerStats;
-import optimization.linesearch.DifferentiableLineSearchObjective;
-import optimization.linesearch.LineSearchMethod;
-import optimization.stopCriteria.StopingCriteria;
-import optimization.util.MathUtils;
-
-public class LBFGS extends AbstractGradientBaseMethod{
-
- //How many previous values are being saved
- int history;
- double[][] skList;
- double[][] ykList;
- double initialHessianParameters;
- double[] previousGradient;
- double[] previousParameters;
-
-	//auxiliary structures
-	double[] q;
- double[] roi;
- double[] alphai;
-
- public LBFGS(LineSearchMethod ls, int history) {
- lineSearch = ls;
- this.history = history;
- skList = new double[history][];
- ykList = new double[history][];
-
- }
-
- public void reset(){
- super.reset();
- initialHessianParameters = 0;
- previousParameters = null;
- previousGradient = null;
- skList = new double[history][];
- ykList = new double[history][];
- q = null;
- roi = null;
- alphai = null;
- }
-
- public double[] LBFGSTwoLoopRecursion(double hessianConst){
- //Only create array once
- if(q == null){
- q = new double[gradient.length];
- }
- System.arraycopy(gradient, 0, q, 0, gradient.length);
- //Only create array once
- if(roi == null){
- roi = new double[history];
- }
- //Only create array once
- if(alphai == null){
- alphai = new double[history];
- }
-
- for(int i = history-1; i >=0 && skList[i]!= null && ykList[i]!=null; i-- ){
- // System.out.println("New to Old proj " + currentProjectionIteration + " history "+history + " index " + i);
- double[] si = skList[i];
- double[] yi = ykList[i];
- roi[i]= 1.0/MathUtils.dotProduct(yi,si);
- alphai[i] = MathUtils.dotProduct(si, q)*roi[i];
- MathUtils.plusEquals(q, yi, -alphai[i]);
- }
- //Initial Hessian is just a constant
- MathUtils.scalarMultiplication(q, hessianConst);
- for(int i = 0; i <history && skList[i]!= null && ykList[i]!=null; i++ ){
- // System.out.println("Old to New proj " + currentProjectionIteration + " history "+history + " index " + i);
- double beta = MathUtils.dotProduct(ykList[i], q)*roi[i];
- MathUtils.plusEquals(q, skList[i], (alphai[i]-beta));
- }
- return q;
- }
-
-
-
-
- @Override
- public double[] getDirection() {
-
- calculateInitialHessianParameter();
-// System.out.println("Initial hessian " + initialHessianParameters);
- return direction = MathUtils.negation(LBFGSTwoLoopRecursion(initialHessianParameters));
- }
-
- public void calculateInitialHessianParameter(){
- if(currentProjectionIteration == 1){
- //Use gradient
- initialHessianParameters = 1;
- }else if(currentProjectionIteration <= history){
- double[] sk = skList[currentProjectionIteration-2];
- double[] yk = ykList[currentProjectionIteration-2];
- initialHessianParameters = MathUtils.dotProduct(sk, yk)/MathUtils.dotProduct(yk, yk);
- }else{
- //get the last one
- double[] sk = skList[history-1];
- double[] yk = ykList[history-1];
- initialHessianParameters = MathUtils.dotProduct(sk, yk)/MathUtils.dotProduct(yk, yk);
- }
- }
-
-	//TODO if structures exist just reset them to zero
- public void initializeStructures(Objective o,OptimizerStats stats, StopingCriteria stop){
- super.initializeStructures(o, stats, stop);
- previousParameters = new double[o.getNumParameters()];
- previousGradient = new double[o.getNumParameters()];
- }
- public void updateStructuresBeforeStep(Objective o,OptimizerStats stats, StopingCriteria stop){
-		super.updateStructuresBeforeStep(o, stats, stop);
- System.arraycopy(o.getParameters(), 0, previousParameters, 0, previousParameters.length);
- System.arraycopy(gradient, 0, previousGradient, 0, gradient.length);
- }
-
- public void updateStructuresAfterStep( Objective o,OptimizerStats stats, StopingCriteria stop){
- double[] diffX = MathUtils.arrayMinus(o.getParameters(), previousParameters);
- double[] diffGrad = MathUtils.arrayMinus(gradient, previousGradient);
-		//Save new values and discard old ones
- if(currentProjectionIteration > history){
- for(int i = 0; i < history-1;i++){
- skList[i]=skList[i+1];
- ykList[i]=ykList[i+1];
- }
- skList[history-1]=diffX;
- ykList[history-1]=diffGrad;
- }else{
- skList[currentProjectionIteration-1]=diffX;
- ykList[currentProjectionIteration-1]=diffGrad;
- }
- }
-
-// public boolean optimize(Objective o, OptimizerStats stats, StopingCriteria stop) {
-// DifferentiableLineSearchObjective lso = new DifferentiableLineSearchObjective(o);
-// gradient = o.getGradient();
-// direction = new double[o.getNumParameters()];
-// previousGradient = new double[o.getNumParameters()];
-//
-// previousParameters = new double[o.getNumParameters()];
-//
-// stats.collectInitStats(this, o);
-// previousValue = Double.MAX_VALUE;
-// currValue= o.getValue();
-// //Used for stopping criteria
-// double[] originalGradient = o.getGradient();
-//
-// originalGradientL2Norm = MathUtils.L2Norm(originalGradient);
-// if(stop.stopOptimization(originalGradient)){
-// stats.collectFinalStats(this, o);
-// return true;
-// }
-// for (currentProjectionIteration = 1; currentProjectionIteration < maxNumberOfIterations; currentProjectionIteration++){
-//
-//
-// currValue = o.getValue();
-// gradient = o.getGradient();
-// currParameters = o.getParameters();
-//
-//
-// if(currentProjectionIteration == 1){
-// //Use gradient
-// initialHessianParameters = 1;
-// }else if(currentProjectionIteration <= history){
-// double[] sk = skList[currentProjectionIteration-2];
-// double[] yk = ykList[currentProjectionIteration-2];
-// initialHessianParameters = MathUtils.dotProduct(sk, yk)/MathUtils.dotProduct(yk, yk);
-// }else{
-// //get the last one
-// double[] sk = skList[history-1];
-// double[] yk = ykList[history-1];
-// initialHessianParameters = MathUtils.dotProduct(sk, yk)/MathUtils.dotProduct(yk, yk);
-// }
-//
-// getDirection();
-//
-// //MatrixOutput.printDoubleArray(direction, "direction");
-// double dot = MathUtils.dotProduct(direction, gradient);
-// if(dot > 0){
-// throw new RuntimeException("Not a descent direction");
-// } if (Double.isNaN(dot)){
-// throw new RuntimeException("dot is not a number!!");
-// }
-// System.arraycopy(currParameters, 0, previousParameters, 0, currParameters.length);
-// System.arraycopy(gradient, 0, previousGradient, 0, gradient.length);
-// lso.reset(direction);
-// step = lineSearch.getStepSize(lso);
-// if(step==-1){
-// System.out.println("Failed to find a step size");
-//// lso.printLineSearchSteps();
-//// System.out.println(stats.prettyPrint(1));
-// stats.collectFinalStats(this, o);
-// return false;
-// }
-// stats.collectIterationStats(this, o);
-//
-// //We are not updating the alpha since it is done in line search already
-// currParameters = o.getParameters();
-// gradient = o.getGradient();
-//
-// if(stop.stopOptimization(gradient)){
-// stats.collectFinalStats(this, o);
-// return true;
-// }
-// double[] diffX = MathUtils.arrayMinus(currParameters, previousParameters);
-// double[] diffGrad = MathUtils.arrayMinus(gradient, previousGradient);
-// //Save new values and discard new ones
-// if(currentProjectionIteration > history){
-// for(int i = 0; i < history-1;i++){
-// skList[i]=skList[i+1];
-// ykList[i]=ykList[i+1];
-// }
-// skList[history-1]=diffX;
-// ykList[history-1]=diffGrad;
-// }else{
-// skList[currentProjectionIteration-1]=diffX;
-// ykList[currentProjectionIteration-1]=diffGrad;
-// }
-// previousValue = currValue;
-// }
-// stats.collectFinalStats(this, o);
-// return false;
-// }
-
-
-
-
-
-
-
-
-
-
-}
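
LBFGSTwoLoopRecursion above is the standard two-loop recursion (Nocedal and Wright, "Numerical Optimization", Algorithm 7.4), with hessianConst playing the role of the initial Hessian scaling gamma_k. In the usual notation, with rho_i = 1 / (y_i^T s_i):

    q <- grad f_k
    for i = k-1, ..., k-m:            (newest to oldest)
        alpha_i <- rho_i * s_i^T q
        q       <- q - alpha_i * y_i
    r <- gamma_k * q,   gamma_k = s_{k-1}^T y_{k-1} / (y_{k-1}^T y_{k-1})
    for i = k-m, ..., k-1:            (oldest to newest)
        beta <- rho_i * y_i^T r
        r    <- r + (alpha_i - beta) * s_i
    return r                          (getDirection() then negates r)

calculateInitialHessianParameter() computes exactly this gamma_k from the most recent (s, y) pair, except on the first iteration, where it falls back to 1 (plain gradient scaling).
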
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Objective.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Objective.java
deleted file mode 100644
index 6be01bf9..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Objective.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package optimization.gradientBasedMethods;
-
-
-/**
- * Defines an optimization objective: parameter storage and access,
- * function value and gradient evaluation, and counters for function,
- * gradient, and update calls.
- * @author javg
- *
- */
-public abstract class Objective {
-
- protected int functionCalls = 0;
- protected int gradientCalls = 0;
- protected int updateCalls = 0;
-
- protected double[] parameters;
-
- //Contains a cache with the gradient
- public double[] gradient;
- int debugLevel = 0;
-
- public void setDebugLevel(int level){
- debugLevel = level;
- }
-
- public int getNumParameters() {
- return parameters.length;
- }
-
- public double getParameter(int index) {
- return parameters[index];
- }
-
- public double[] getParameters() {
- return parameters;
- }
-
- public abstract double[] getGradient( );
-
- public void setParameter(int index, double value) {
- parameters[index]=value;
- }
-
- public void setParameters(double[] params) {
- if(parameters == null){
- parameters = new double[params.length];
- }
- updateCalls++;
- System.arraycopy(params, 0, parameters, 0, params.length);
- }
-
-
- public int getNumberFunctionCalls() {
- return functionCalls;
- }
-
- public int getNumberGradientCalls() {
- return gradientCalls;
- }
-
- public int getNumberUpdateCalls() {
- return updateCalls;
- }
-
- public String finalInfoString() {
-		return "FE: " + functionCalls + " GE: " + gradientCalls + " Params updates: " +
-		updateCalls;
- }
- public void printParameters() {
- System.out.println(toString());
- }
-
- public abstract String toString();
- public abstract double getValue ();
-
- /**
- * Sets the initial objective parameters
- * For unconstrained models this just sets the objective params = argument no copying
- * For a constrained objective project the parameters and then set
- * @param params
- */
- public void setInitialParameters(double[] params){
- parameters = params;
- }
-
-}
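
As a usage sketch, a concrete subclass only has to store parameters, implement getValue()/getGradient()/toString(), and bump the call counters. A hypothetical quadratic objective against this class (illustrative only, not part of the package; it compiles against the Objective fields shown above):

    package optimization.gradientBasedMethods;

    // Hypothetical concrete Objective: f(x) = ||x - c||^2.
    public class QuadraticObjective extends Objective {
        private final double[] c;

        public QuadraticObjective(double[] initial, double[] c) {
            this.c = c;
            setInitialParameters(initial);
        }

        @Override
        public double getValue() {
            functionCalls++;
            double v = 0;
            for (int i = 0; i < parameters.length; i++) {
                double d = parameters[i] - c[i];
                v += d * d;
            }
            return v;
        }

        @Override
        public double[] getGradient() {
            gradientCalls++;
            if (gradient == null) gradient = new double[parameters.length]; // reuse the cache field
            for (int i = 0; i < parameters.length; i++) {
                gradient[i] = 2 * (parameters[i] - c[i]);
            }
            return gradient;
        }

        @Override
        public String toString() {
            return java.util.Arrays.toString(parameters);
        }
    }
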
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Optimizer.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Optimizer.java
deleted file mode 100644
index 96fce5b0..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/Optimizer.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import optimization.gradientBasedMethods.stats.OptimizerStats;
-import optimization.stopCriteria.StopingCriteria;
-
-public interface Optimizer {
- public boolean optimize(Objective o,OptimizerStats stats, StopingCriteria stoping);
-
-
- public double[] getDirection();
- public double getCurrentStep();
- public double getCurrentValue();
- public int getCurrentIteration();
- public void reset();
-
- public void setMaxIterations(int max);
-
-
-}
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedAbstractGradientBaseMethod.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedAbstractGradientBaseMethod.java
deleted file mode 100644
index afb29d04..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedAbstractGradientBaseMethod.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package optimization.gradientBasedMethods;
-
-
-/**
- *
- * @author javg
- *
- */
-public abstract class ProjectedAbstractGradientBaseMethod extends AbstractGradientBaseMethod implements ProjectedOptimizer{
-
-}
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedGradientDescent.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedGradientDescent.java
deleted file mode 100644
index 0186e945..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedGradientDescent.java
+++ /dev/null
@@ -1,154 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import java.io.IOException;
-
-import optimization.gradientBasedMethods.stats.OptimizerStats;
-import optimization.linesearch.DifferentiableLineSearchObjective;
-import optimization.linesearch.LineSearchMethod;
-import optimization.linesearch.ProjectedDifferentiableLineSearchObjective;
-import optimization.stopCriteria.StopingCriteria;
-import optimization.util.MathUtils;
-
-
-/**
- * This class implements projected gradient descent
- * as described in Bertsekas, "Nonlinear Programming",
- * section 2.3.
- *
- * The update is given by:
- *   x_{k+1} = x_k + alpha_k (xbar_k - x_k)
- * where
- *   xbar_k = [x_k - s_k grad f(x_k)]+
- * and []+ denotes projection onto the feasible set.
- *
- * alpha_k is the step size; s_k is a positive scalar which can be
- * viewed as a step size as well. Setting alpha_k to 1 gives
- *   x_{k+1} = [x_k - s_k grad f(x_k)]+
- * which Bertsekas calls taking a step along the projection arc; this
- * is what we use by default.
- *
- * Since the only step size actually chosen is s_k, this behaves like
- * ordinary gradient descent, except that the Armijo line search
- * projects the iterate back onto the feasible set after each step.
- *
- *
- * @author javg
- *
- */
-public class ProjectedGradientDescent extends ProjectedAbstractGradientBaseMethod{
-
-
-
-
- public ProjectedGradientDescent(LineSearchMethod lineSearch) {
- this.lineSearch = lineSearch;
- }
-
- //Use projected differential objective instead
- public void initializeStructures(Objective o, OptimizerStats stats, StopingCriteria stop) {
- lso = new ProjectedDifferentiableLineSearchObjective(o);
- };
-
-
- ProjectedObjective obj;
- public boolean optimize(ProjectedObjective o,OptimizerStats stats, StopingCriteria stop){
- obj = o;
- return super.optimize(o, stats, stop);
- }
-
- public double[] getDirection(){
- for(int i = 0; i< gradient.length; i++){
- direction[i] = -gradient[i];
- }
- return direction;
- }
-
-
-
-
-}
-
-
-
-
-
-
-
-///OLD CODE
-
-//Use projected gradient norm
-//public boolean stopCriteria(double[] gradient){
-// if(originalDirenctionL2Norm == 0){
-// System.out.println("Leaving original direction norm is zero");
-// return true;
-// }
-// if(MathUtils.L2Norm(direction)/originalDirenctionL2Norm < gradientConvergenceValue){
-// System.out.println("Leaving projected gradient Norm smaller than epsilon");
-// return true;
-// }
-// if((previousValue - currValue)/Math.abs(previousValue) < valueConvergenceValue) {
-// System.out.println("Leaving value change below treshold " + previousValue + " - " + currValue);
-// System.out.println(previousValue/currValue + " - " + currValue/currValue
-// + " = " + (previousValue - currValue)/Math.abs(previousValue));
-// return true;
-// }
-// return false;
-//}
-//
-
-//public boolean optimize(ProjectedObjective o,OptimizerStats stats, StopingCriteria stop){
-// stats.collectInitStats(this, o);
-// obj = o;
-// step = 0;
-// currValue = o.getValue();
-// previousValue = Double.MAX_VALUE;
-// gradient = o.getGradient();
-// originalGradientL2Norm = MathUtils.L2Norm(gradient);
-// parameterChange = new double[gradient.length];
-// getDirection();
-// ProjectedDifferentiableLineSearchObjective lso = new ProjectedDifferentiableLineSearchObjective(o,direction);
-//
-// originalDirenctionL2Norm = MathUtils.L2Norm(direction);
-// //MatrixOutput.printDoubleArray(currParameters, "parameters");
-// for (currentProjectionIteration = 0; currentProjectionIteration < maxNumberOfIterations; currentProjectionIteration++){
-// // System.out.println("Iter " + currentProjectionIteration);
-// //o.printParameters();
-//
-//
-//
-// if(stop.stopOptimization(gradient)){
-// stats.collectFinalStats(this, o);
-// lastStepUsed = step;
-// return true;
-// }
-// lso.reset(direction);
-// step = lineSearch.getStepSize(lso);
-// if(step==-1){
-// System.out.println("Failed to find step");
-// stats.collectFinalStats(this, o);
-// return false;
-//
-// }
-//
-// //Update the direction for stopping criteria
-// previousValue = currValue;
-// currValue = o.getValue();
-// gradient = o.getGradient();
-// direction = getDirection();
-// if(MathUtils.dotProduct(gradient, direction) > 0){
-// System.out.println("Not a descent direction");
-// System.out.println(" current stats " + stats.prettyPrint(1));
-// System.exit(-1);
-// }
-// stats.collectIterationStats(this, o);
-// }
-// lastStepUsed = step;
-// stats.collectFinalStats(this, o);
-// return false;
-// }
-
-//public boolean optimize(Objective o,OptimizerStats stats, StopingCriteria stop){
-// System.out.println("Objective is not a projected objective");
-// throw new RuntimeException();
-//}
-
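
Concretely, a step along the projection arc just projects the trial point after each backtracking step. A standalone sketch of that loop, with a simple box constraint x >= 0 standing in for projectPoint() (the constraint, objective, and constants are illustrative assumptions):

    // One projected-gradient step "along the projection arc": form the trial
    // point x - s*grad, project it, and backtrack on s until the objective
    // decreases. Projection here clips to the box x >= 0.
    public class ProjectionArcSketch {
        static double value(double[] x) {            // f(x) = (x0-1)^2 + (x1+2)^2
            return (x[0] - 1) * (x[0] - 1) + (x[1] + 2) * (x[1] + 2);
        }

        static double[] gradient(double[] x) {
            return new double[]{2 * (x[0] - 1), 2 * (x[1] + 2)};
        }

        static double[] project(double[] x) {        // [.]+ : clip to x >= 0
            double[] p = x.clone();
            for (int i = 0; i < p.length; i++) p[i] = Math.max(0, p[i]);
            return p;
        }

        public static void main(String[] args) {
            double[] x = {3.0, 3.0};
            for (int iter = 0; iter < 100; iter++) {
                double[] g = gradient(x);
                double s = 1.0, f = value(x);
                double[] trial;
                do {                                 // backtrack along the projection arc
                    double[] step = x.clone();
                    for (int i = 0; i < x.length; i++) step[i] -= s * g[i];
                    trial = project(step);
                    s *= 0.5;
                } while (value(trial) >= f && s > 1e-12);
                if (value(trial) >= f) break;        // no decrease possible: stop
                x = trial;
            }
            System.out.println(java.util.Arrays.toString(x)); // ~ (1, 0)
        }
    }
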
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedObjective.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedObjective.java
deleted file mode 100644
index c3d21393..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedObjective.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package optimization.gradientBasedMethods;
-
-import optimization.util.MathUtils;
-
-
-/**
- * An objective constrained to a feasible set.
- * When parameters are set, they are automatically projected back onto
- * the feasible set (e.g. the simplex).
- *
- * When the gradient is requested, the projected gradient is returned.
- * @author javg
- *
- */
-public abstract class ProjectedObjective extends Objective{
-
- public abstract double[] projectPoint (double[] point);
-
- public double[] auxParameters;
-
-
- public void setInitialParameters(double[] params){
- setParameters(projectPoint(params));
- }
-
-
-
-
-}
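
For the simplex case mentioned in the Javadoc, the projection that projectPoint() abstracts is the Euclidean projection onto {x : x_i >= 0, sum_i x_i = 1}. A standalone sketch of the standard sort-based O(n log n) algorithm (Duchi et al., 2008), which a concrete projectPoint() might use:

    import java.util.Arrays;

    // Euclidean projection onto the probability simplex.
    public class SimplexProjection {
        static double[] project(double[] v) {
            int n = v.length;
            double[] u = v.clone();
            Arrays.sort(u);                      // ascending; u[n-j] is the j-th largest
            // theta is the last threshold (cumSum - 1)/j for which the
            // j-th largest entry stays positive after shifting.
            double cumSum = 0, theta = 0;
            for (int j = 1; j <= n; j++) {
                double uj = u[n - j];
                cumSum += uj;
                double t = (cumSum - 1) / j;
                if (uj - t > 0) theta = t; else break;
            }
            double[] w = new double[n];
            for (int i = 0; i < n; i++) w[i] = Math.max(0, v[i] - theta);
            return w;
        }

        public static void main(String[] args) {
            // Components come out nonnegative and summing to 1.
            System.out.println(Arrays.toString(project(new double[]{0.5, 1.2, -0.3})));
        }
    }
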
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedOptimizer.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedOptimizer.java
deleted file mode 100644
index 81d8403e..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/ProjectedOptimizer.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package optimization.gradientBasedMethods;
-
-
-
-public interface ProjectedOptimizer extends Optimizer{
-
-
-
-
-}
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/OptimizerStats.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/OptimizerStats.java
deleted file mode 100644
index 6340ef73..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/OptimizerStats.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package optimization.gradientBasedMethods.stats;
-
-import java.util.ArrayList;
-
-import optimization.gradientBasedMethods.Objective;
-import optimization.gradientBasedMethods.Optimizer;
-import optimization.util.MathUtils;
-import optimization.util.StaticTools;
-
-
-public class OptimizerStats {
-
- double start = 0;
- double totalTime = 0;
-
- String objectiveFinalStats;
-
- ArrayList<Double> gradientNorms = new ArrayList<Double>();
- ArrayList<Double> steps = new ArrayList<Double>();
- ArrayList<Double> value = new ArrayList<Double>();
- ArrayList<Integer> iterations = new ArrayList<Integer>();
- double prevValue =0;
-
- public void reset(){
- start = 0;
- totalTime = 0;
-
- objectiveFinalStats="";
-
- gradientNorms.clear();
- steps.clear();
- value.clear();
- iterations.clear();
- prevValue =0;
- }
-
- public void startTime() {
- start = System.currentTimeMillis();
- }
- public void stopTime() {
- totalTime += System.currentTimeMillis() - start;
- }
-
- public String prettyPrint(int level){
- StringBuffer res = new StringBuffer();
- res.append("Total time " + totalTime/1000 + " seconds \n" + "Iterations " + iterations.size() + "\n");
- res.append(objectiveFinalStats+"\n");
- if(level > 0){
- if(iterations.size() > 0){
- res.append("\tIteration"+iterations.get(0)+"\tstep: "+StaticTools.prettyPrint(steps.get(0), "0.00E00", 6)+ "\tgradientNorm "+
- StaticTools.prettyPrint(gradientNorms.get(0), "0.00000E00", 10)+ "\tvalue "+ StaticTools.prettyPrint(value.get(0), "0.000000E00",11)+"\n");
- }
- for(int i = 1; i < iterations.size(); i++){
- res.append("\tIteration:\t"+iterations.get(i)+"\tstep:"+StaticTools.prettyPrint(steps.get(i), "0.00E00", 6)+ "\tgradientNorm "+
- StaticTools.prettyPrint(gradientNorms.get(i), "0.00000E00", 10)+
- "\tvalue:\t"+ StaticTools.prettyPrint(value.get(i), "0.000000E00",11)+
- "\tvalueDiff:\t"+ StaticTools.prettyPrint((value.get(i-1)-value.get(i)), "0.000000E00",11)+
- "\n");
- }
- }
- return res.toString();
- }
-
-
- public void collectInitStats(Optimizer optimizer, Objective objective){
- startTime();
- iterations.add(-1);
- gradientNorms.add(MathUtils.L2Norm(objective.getGradient()));
- steps.add(0.0);
- value.add(objective.getValue());
- }
-
- public void collectIterationStats(Optimizer optimizer, Objective objective){
- iterations.add(optimizer.getCurrentIteration());
- gradientNorms.add(MathUtils.L2Norm(objective.getGradient()));
- steps.add(optimizer.getCurrentStep());
- value.add(optimizer.getCurrentValue());
- }
-
-
- public void collectFinalStats(Optimizer optimizer, Objective objective){
- stopTime();
- objectiveFinalStats = objective.finalInfoString();
- }
-
-}
diff --git a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/ProjectedOptimizerStats.java b/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/ProjectedOptimizerStats.java
deleted file mode 100644
index d65a1267..00000000
--- a/gi/posterior-regularisation/prjava/src/optimization/gradientBasedMethods/stats/ProjectedOptimizerStats.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package optimization.gradientBasedMethods.stats;
-
-import java.util.ArrayList;
-
-import optimization.gradientBasedMethods.Objective;
-import optimization.gradientBasedMethods.Optimizer;
-import optimization.gradientBasedMethods.ProjectedObjective;
-import optimization.gradientBasedMethods.ProjectedOptimizer;
-import optimization.util.MathUtils;
-import optimization.util.StaticTools;
-
-
-public class ProjectedOptimizerStats extends OptimizerStats{
-
-
-
-	ArrayList<Double> projectedGradientNorms = new ArrayList<Double>();
-
-	public void reset(){
-		super.reset();
-		projectedGradientNorms.clear();
-	}
-
- public String prettyPrint(int level){
- StringBuffer res = new StringBuffer();
- res.append("Total time " + totalTime/1000 + " seconds \n" + "Iterations " + iterations.size() + "\n");
- res.append(objectiveFinalStats+"\n");
- if(level > 0){
- if(iterations.size() > 0){
- res.append("\tIteration"+iterations.get(0)+"\tstep: "+
- StaticTools.prettyPrint(steps.get(0), "0.00E00", 6)+ "\tgradientNorm "+
- StaticTools.prettyPrint(gradientNorms.get(0), "0.00000E00", 10)
- + "\tdirection"+
- StaticTools.prettyPrint(projectedGradientNorms.get(0), "0.00000E00", 10)+
- "\tvalue "+ StaticTools.prettyPrint(value.get(0), "0.000000E00",11)+"\n");
- }
- for(int i = 1; i < iterations.size(); i++){
- res.append("\tIteration"+iterations.get(i)+"\tstep: "+StaticTools.prettyPrint(steps.get(i), "0.00E00", 6)+ "\tgradientNorm "+
- StaticTools.prettyPrint(gradientNorms.get(i), "0.00000E00", 10)+
- "\t direction "+
- StaticTools.prettyPrint(projectedGradientNorms.get(i), "0.00000E00", 10)+
- "\tvalue "+ StaticTools.prettyPrint(value.get(i), "0.000000E00",11)+
- "\tvalueDiff "+ StaticTools.prettyPrint((value.get(i-1)-value.get(i)), "0.000000E00",11)+
- "\n");
- }
- }
- return res.toString();
- }
-
-
- public void collectInitStats(Optimizer optimizer, Objective objective){
- startTime();
- }
-
- public void collectIterationStats(Optimizer optimizer, Objective objective){
- iterations.add(optimizer.getCurrentIteration());
- gradientNorms.add(MathUtils.L2Norm(objective.getGradient()));
- projectedGradientNorms.add(MathUtils.L2Norm(optimizer.getDirection()));
- steps.add(optimizer.getCurrentStep());
- value.add(optimizer.getCurrentValue());
- }
-
-
-
- public void collectFinalStats(Optimizer optimizer, Objective objective){
- stopTime();
- objectiveFinalStats = objective.finalInfoString();
- }
-
-}