author | desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-07 14:47:49 +0000
---|---|---
committer | desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-07 14:47:49 +0000
commit | 7156fc276b6b5d6e1efb7afc494190af1cf452d8 (patch) |
tree | 07b86f66178aae69831d5f58d609caff6daea769 /gi |
parent | a4208b6ffdded5e03959245026b88f7716406de3 (diff) |
save lambdas of previous iteration
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@177 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'gi')
-rw-r--r-- | gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java | 21 |
1 file changed, 15 insertions, 6 deletions
diff --git a/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java b/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
index b7c62261..0fdc169b 100644
--- a/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
+++ b/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
@@ -19,12 +19,12 @@ import optimization.util.MathUtils;
public class PhraseObjective extends ProjectedObjective{
- private static final double GRAD_DIFF = 0.002;
+ private static final double GRAD_DIFF = 0.00002;
public static double INIT_STEP_SIZE = 10;
- public static double VAL_DIFF = 0.001; // FIXME needs to be tuned
+ public static double VAL_DIFF = 0.000001; // FIXME needs to be tuned
//private double c1=0.0001; // wolf stuff
//private double c2=0.9;
-
+ private static double lambda[][];
private PhraseCluster c;
/**@brief
@@ -68,7 +68,16 @@ public class PhraseObjective extends ProjectedObjective{
c=cluster;
data=c.c.data[phrase];
n_param=data.length*c.K;
- parameters=new double [n_param];
+
+ if( lambda==null){
+ lambda=new double[c.c.data.length][];
+ }
+
+ if(lambda[phrase]==null){
+ lambda[phrase]=new double[n_param];
+ }
+
+ parameters=lambda[phrase];
newPoint = new double[n_param];
gradient = new double[n_param];
initP();
@@ -172,7 +181,7 @@ public class PhraseObjective extends ProjectedObjective{
ProjectedGradientDescent optimizer = new ProjectedGradientDescent(ls);
StopingCriteria stopGrad = new ProjectedGradientL2Norm(GRAD_DIFF);
- StopingCriteria stopValue = new ValueDifference(VAL_DIFF);
+ StopingCriteria stopValue = new ValueDifference(VAL_DIFF*(-llh));
CompositeStopingCriteria compositeStop = new CompositeStopingCriteria();
compositeStop.add(stopGrad);
compositeStop.add(stopValue);
@@ -185,7 +194,7 @@ public class PhraseObjective extends ProjectedObjective{
}else{
System.out.println("Failed to optimize");
}
-
+ lambda[phrase]=parameters;
// ps.println(Arrays.toString(parameters));
// for(int edge=0;edge<data.length;edge++){
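The pattern behind this commit, a warm-start cache of per-phrase dual parameters (lambdas) reused across EM iterations, can be sketched in isolation. The class and method names below (`WarmStartCache`, `startingPoint`, `save`) are hypothetical and not part of this repository; only the idea of lazily allocating a lambda array per phrase and reusing it as the optimizer's starting point is taken from the diff above.

```java
import java.util.Arrays;

// Minimal sketch, assuming one dual-parameter vector per phrase: lambdas are
// kept across EM iterations so the projected gradient optimizer starts from
// the previous solution instead of zeros. All names here are hypothetical.
public class WarmStartCache {
    // lambda[phrase] holds the parameters saved after the last optimization,
    // or null if this phrase has not been optimized yet (lazy allocation,
    // mirroring the diff's "if(lambda[phrase]==null)" check).
    private final double[][] lambda;

    public WarmStartCache(int numPhrases) {
        this.lambda = new double[numPhrases][];
    }

    // Returns the saved parameters for a phrase, allocating zeros on first use.
    public double[] startingPoint(int phrase, int nParam) {
        if (lambda[phrase] == null) {
            lambda[phrase] = new double[nParam];  // first iteration: all zeros
        }
        return lambda[phrase];
    }

    // Saves the optimized parameters so the next iteration warm-starts from them.
    public void save(int phrase, double[] optimized) {
        lambda[phrase] = optimized;
    }

    public static void main(String[] args) {
        WarmStartCache cache = new WarmStartCache(2);
        double[] p = cache.startingPoint(0, 3);    // zeros on the first call
        p[0] = 0.5; p[1] = -0.1; p[2] = 0.2;       // stand-in for an optimizer run
        cache.save(0, p);
        System.out.println(Arrays.toString(cache.startingPoint(0, 3))); // reused values
    }
}
```

Note that the diff uses a static field for the cache, so the saved lambdas survive even though a fresh PhraseObjective is constructed for each phrase in each iteration; the sketch above makes the same state explicit as an instance owned by the caller.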
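The second hunk changes the value-difference stopping test from an absolute tolerance to one scaled by -llh, the magnitude of the negative log-likelihood, which makes convergence independent of the objective's scale. A minimal illustration of that check, with hypothetical names (`RelativeValueStop` and `converged` are stand-ins, not the optimization package's ValueDifference class):

```java
public class RelativeValueStop {
    // Stand-in for ValueDifference(VAL_DIFF * (-llh)) in the diff: stop when
    // the change in objective value is small relative to the objective's size.
    static boolean converged(double prevValue, double curValue,
                             double relTol, double llh) {
        // llh is negative for a log-likelihood, so -llh is its magnitude;
        // scaling the tolerance by it makes the test scale-invariant.
        return Math.abs(curValue - prevValue) < relTol * (-llh);
    }

    public static void main(String[] args) {
        double llh = -4200.0;  // hypothetical log-likelihood
        // Effective tolerance is 0.000001 * 4200 = 0.0042.
        System.out.println(converged(-4200.0, -4200.003, 0.000001, llh)); // true
        System.out.println(converged(-4200.0, -4210.0, 0.000001, llh));   // false
    }
}
```

This pairs naturally with the much tighter GRAD_DIFF and VAL_DIFF constants in the first hunk: the absolute constants can be small because the value test is now relative.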