author    | desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-07 14:47:49 +0000
committer | desaicwtf <desaicwtf@ec762483-ff6d-05da-a07a-a48fb63a330f> | 2010-07-07 14:47:49 +0000
commit    | f1dcb9d495b9a8f27cf83045c5b134f49a8f2b14 (patch)
tree      | b9209b06491ae1205cc4ab1c4ca398d508c5dd2a /gi
parent    | b34dc5f6ee44fe4e85508bbca4180b6afa9915c1 (diff)
save lambdas of previous iteration
git-svn-id: https://ws10smt.googlecode.com/svn/trunk@177 ec762483-ff6d-05da-a07a-a48fb63a330f
Diffstat (limited to 'gi')
-rw-r--r-- | gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java | 21 |
1 file changed, 15 insertions, 6 deletions
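The patch below implements the commit message's warm start: instead of zero-initializing the dual parameters for each phrase on every EM iteration, `PhraseObjective` lazily allocates a static per-phrase `lambda` cache and hands the previous iteration's solution to the optimizer as its starting point. A minimal sketch of the pattern, using a hypothetical `WarmStartCache` class that is not part of the repository:

```java
public class WarmStartCache {
    // One dual-parameter vector per phrase, shared across EM iterations.
    // (In PhraseObjective this is the static lambda[][] field.)
    private static double[][] lambda;

    public static double[] paramsFor(int phrase, int numPhrases, int nParam) {
        if (lambda == null) {
            lambda = new double[numPhrases][];   // allocate the outer array once
        }
        if (lambda[phrase] == null) {
            lambda[phrase] = new double[nParam]; // first visit: start from zeros
        }
        return lambda[phrase];                   // later visits: previous solution
    }

    // After optimization converges, store the solution for the next iteration.
    public static void save(int phrase, double[] solution) {
        lambda[phrase] = solution;
    }
}
```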
diff --git a/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java b/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
index b7c62261..0fdc169b 100644
--- a/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
+++ b/gi/posterior-regularisation/prjava/src/phrase/PhraseObjective.java
@@ -19,12 +19,12 @@ import optimization.util.MathUtils;
  public class PhraseObjective extends ProjectedObjective{
- private static final double GRAD_DIFF = 0.002;
+ private static final double GRAD_DIFF = 0.00002;
  public static double INIT_STEP_SIZE = 10;
- public static double VAL_DIFF = 0.001; // FIXME needs to be tuned
+ public static double VAL_DIFF = 0.000001; // FIXME needs to be tuned
  //private double c1=0.0001; // wolf stuff
  //private double c2=0.9;
-
+ private static double lambda[][];
  private PhraseCluster c;
  /**@brief
@@ -68,7 +68,16 @@ public class PhraseObjective extends ProjectedObjective{
  c=cluster;
  data=c.c.data[phrase];
  n_param=data.length*c.K;
- parameters=new double [n_param];
+
+ if( lambda==null){
+   lambda=new double[c.c.data.length][];
+ }
+
+ if(lambda[phrase]==null){
+   lambda[phrase]=new double[n_param];
+ }
+
+ parameters=lambda[phrase];
  newPoint = new double[n_param];
  gradient = new double[n_param];
  initP();
@@ -172,7 +181,7 @@ public class PhraseObjective extends ProjectedObjective{
  ProjectedGradientDescent optimizer = new ProjectedGradientDescent(ls);
  StopingCriteria stopGrad = new ProjectedGradientL2Norm(GRAD_DIFF);
- StopingCriteria stopValue = new ValueDifference(VAL_DIFF);
+ StopingCriteria stopValue = new ValueDifference(VAL_DIFF*(-llh));
  CompositeStopingCriteria compositeStop = new CompositeStopingCriteria();
  compositeStop.add(stopGrad);
  compositeStop.add(stopValue);
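The hunk above replaces the absolute value-difference convergence test with a relative one: `VAL_DIFF` is tightened from 0.001 to 1e-6 and then scaled by `-llh`, the magnitude of the (negative) log-likelihood, so the stopping threshold adapts to the objective's scale. A small illustration with made-up numbers:

```java
public class RelativeToleranceDemo {
    public static void main(String[] args) {
        // Hypothetical values for illustration; llh stands in for the
        // phrase objective's log-likelihood, which is large and negative.
        double llh = -2.5e5;
        double valDiff = 0.000001;            // the new VAL_DIFF constant
        double threshold = valDiff * (-llh);  // 0.25 for this example
        // The composite criterion now stops once successive objective
        // values differ by less than a threshold proportional to |llh|,
        // rather than by a fixed absolute amount.
        System.out.println("stop threshold = " + threshold);
    }
}
```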
@@ -185,7 +194,7 @@ public class PhraseObjective extends ProjectedObjective{
  }else{
    System.out.println("Failed to optimize");
  }
-
+ lambda[phrase]=parameters;
  // ps.println(Arrays.toString(parameters));
  // for(int edge=0;edge<data.length;edge++){
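Two design points worth noting, as far as one can tell from the diff alone: `lambda` is `static`, so the cache is shared by every `PhraseObjective` instance and persists across EM iterations, and it is presumably not safe to optimize objectives for different phrases concurrently. Writing `parameters` back into `lambda[phrase]` after optimization also matters because the projected-gradient optimizer may leave the converged point in a freshly allocated array rather than updating the initial one in place.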