author     Patrick Simianer <p@simianer.de>  2015-01-23 16:06:11 +0100
committer  Patrick Simianer <p@simianer.de>  2015-01-23 16:06:11 +0100
commit     5cc49af3caee21a0b745d949431378acb6b62fdc (patch)
tree       23e81b35f6cf1a1c5599f396b598abcdaf39683d /training/dtrain/examples/parallelized
parent     32dea3f24e56ac7c17343457c48f750f16838742 (diff)
updated parallelized example
Diffstat (limited to 'training/dtrain/examples/parallelized')
-rw-r--r--  training/dtrain/examples/parallelized/cdec.ini | 2
-rw-r--r--  training/dtrain/examples/parallelized/in | 20
-rw-r--r--  training/dtrain/examples/parallelized/refs | 10
-rw-r--r--  training/dtrain/examples/parallelized/work/out.0.0 | 57
-rw-r--r--  training/dtrain/examples/parallelized/work/out.0.1 | 55
-rw-r--r--  training/dtrain/examples/parallelized/work/out.0.2 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/out.1.0 | 57
-rw-r--r--  training/dtrain/examples/parallelized/work/out.1.1 | 55
-rw-r--r--  training/dtrain/examples/parallelized/work/out.1.2 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/out.2.0 | 65
-rw-r--r--  training/dtrain/examples/parallelized/work/out.2.1 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/out.2.2 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/out.3.0 | 65
-rw-r--r--  training/dtrain/examples/parallelized/work/out.3.1 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/out.3.2 | 66
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.0.0.in | 8
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.0.0.refs | 5
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.1.0.in | 8
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.1.0.refs | 5
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.2.0.in | 3
-rw-r--r--  training/dtrain/examples/parallelized/work/shard.3.0.in | 1
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.0 | 24
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.0.0 | 23
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.0.1 | 24
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.0.2 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.1 | 24
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.1.0 | 23
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.1.1 | 24
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.1.2 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.2 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.2.0 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.2.1 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.2.2 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.3.0 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.3.1 | 12
-rw-r--r--  training/dtrain/examples/parallelized/work/weights.3.2 | 12
36 files changed, 844 insertions, 218 deletions
diff --git a/training/dtrain/examples/parallelized/cdec.ini b/training/dtrain/examples/parallelized/cdec.ini
index 5773029a..733b1653 100644
--- a/training/dtrain/examples/parallelized/cdec.ini
+++ b/training/dtrain/examples/parallelized/cdec.ini
@@ -4,7 +4,7 @@ intersection_strategy=cube_pruning
cubepruning_pop_limit=200
scfg_max_span_limit=15
feature_function=WordPenalty
-feature_function=KLanguageModel ../standard//nc-wmt11.en.srilm.gz
+feature_function=KLanguageModel ../standard/nc-wmt11.en.srilm.gz
#feature_function=ArityPenalty
#feature_function=CMR2008ReorderingFeatures
#feature_function=Dwarf
diff --git a/training/dtrain/examples/parallelized/in b/training/dtrain/examples/parallelized/in
index 51d01fe7..82555908 100644
--- a/training/dtrain/examples/parallelized/in
+++ b/training/dtrain/examples/parallelized/in
@@ -1,10 +1,10 @@
-<seg grammar="grammar/grammar.out.0.gz" id="0">europas nach rassen geteiltes haus</seg>
-<seg grammar="grammar/grammar.out.1.gz" id="1">ein gemeinsames merkmal aller extremen rechten in europa ist ihr rassismus und die tatsache , daß sie das einwanderungsproblem als politischen hebel benutzen .</seg>
-<seg grammar="grammar/grammar.out.2.gz" id="2">der lega nord in italien , der vlaams block in den niederlanden , die anhänger von le pens nationaler front in frankreich , sind beispiele für parteien oder bewegungen , die sich um das gemeinsame thema : ablehnung der zuwanderung gebildet haben und um forderung nach einer vereinfachten politik , um sie zu regeln .</seg>
-<seg grammar="grammar/grammar.out.3.gz" id="3">während individuen wie jörg haidar und jean @-@ marie le pen kommen und ( leider nicht zu bald ) wieder gehen mögen , wird die rassenfrage aus der europäischer politik nicht so bald verschwinden .</seg>
-<seg grammar="grammar/grammar.out.4.gz" id="4">eine alternde einheimische bevölkerung und immer offenere grenzen vermehren die rassistische zersplitterung in den europäischen ländern .</seg>
-<seg grammar="grammar/grammar.out.5.gz" id="5">die großen parteien der rechten und der linken mitte haben sich dem problem gestellt , in dem sie den kopf in den sand gesteckt und allen aussichten zuwider gehofft haben , es möge bald verschwinden .</seg>
-<seg grammar="grammar/grammar.out.6.gz" id="6">das aber wird es nicht , wie die geschichte des rassismus in amerika deutlich zeigt .</seg>
-<seg grammar="grammar/grammar.out.7.gz" id="7">die beziehungen zwischen den rassen standen in den usa über jahrzehnte - und tun das noch heute - im zentrum der politischen debatte . das ging so weit , daß rassentrennung genauso wichtig wie das einkommen wurde , - wenn nicht sogar noch wichtiger - um politische zuneigungen und einstellungen zu bestimmen .</seg>
-<seg grammar="grammar/grammar.out.8.gz" id="8">der erste schritt , um mit der rassenfrage umzugehen ist , ursache und folgen rassistischer feindseligkeiten zu verstehen , auch dann , wenn das bedeutet , unangenehme tatsachen aufzudecken .</seg>
-<seg grammar="grammar/grammar.out.9.gz" id="9">genau das haben in den usa eine große anzahl an forschungsvorhaben in wirtschaft , soziologie , psychologie und politikwissenschaft geleistet . diese forschungen zeigten , daß menschen unterschiedlicher rasse einander deutlich weniger vertrauen .</seg>
+<seg grammar="grammar/grammar.out.0.gz" id="0">europas nach rassen geteiltes haus</seg> ||| europe 's divided racial house
+<seg grammar="grammar/grammar.out.1.gz" id="1">ein gemeinsames merkmal aller extremen rechten in europa ist ihr rassismus und die tatsache , daß sie das einwanderungsproblem als politischen hebel benutzen .</seg> ||| a common feature of europe 's extreme right is its racism and use of the immigration issue as a political wedge .
+<seg grammar="grammar/grammar.out.2.gz" id="2">der lega nord in italien , der vlaams block in den niederlanden , die anhänger von le pens nationaler front in frankreich , sind beispiele für parteien oder bewegungen , die sich um das gemeinsame thema : ablehnung der zuwanderung gebildet haben und um forderung nach einer vereinfachten politik , um sie zu regeln .</seg> ||| the lega nord in italy , the vlaams blok in the netherlands , the supporters of le pen 's national front in france , are all examples of parties or movements formed on the common theme of aversion to immigrants and promotion of simplistic policies to control them .
+<seg grammar="grammar/grammar.out.3.gz" id="3">während individuen wie jörg haidar und jean @-@ marie le pen kommen und ( leider nicht zu bald ) wieder gehen mögen , wird die rassenfrage aus der europäischer politik nicht so bald verschwinden .</seg> ||| while individuals like jorg haidar and jean @-@ marie le pen may come and ( never to soon ) go , the race question will not disappear from european politics anytime soon .
+<seg grammar="grammar/grammar.out.4.gz" id="4">eine alternde einheimische bevölkerung und immer offenere grenzen vermehren die rassistische zersplitterung in den europäischen ländern .</seg> ||| an aging population at home and ever more open borders imply increasing racial fragmentation in european countries .
+<seg grammar="grammar/grammar.out.5.gz" id="5">die großen parteien der rechten und der linken mitte haben sich dem problem gestellt , in dem sie den kopf in den sand gesteckt und allen aussichten zuwider gehofft haben , es möge bald verschwinden .</seg> ||| mainstream parties of the center left and center right have confronted this prospect by hiding their heads in the ground , hoping against hope that the problem will disappear .
+<seg grammar="grammar/grammar.out.6.gz" id="6">das aber wird es nicht , wie die geschichte des rassismus in amerika deutlich zeigt .</seg> ||| it will not , as america 's racial history clearly shows .
+<seg grammar="grammar/grammar.out.7.gz" id="7">die beziehungen zwischen den rassen standen in den usa über jahrzehnte - und tun das noch heute - im zentrum der politischen debatte . das ging so weit , daß rassentrennung genauso wichtig wie das einkommen wurde , - wenn nicht sogar noch wichtiger - um politische zuneigungen und einstellungen zu bestimmen .</seg> ||| race relations in the us have been for decades - and remain - at the center of political debate , to the point that racial cleavages are as important as income , if not more , as determinants of political preferences and attitudes .
+<seg grammar="grammar/grammar.out.8.gz" id="8">der erste schritt , um mit der rassenfrage umzugehen ist , ursache und folgen rassistischer feindseligkeiten zu verstehen , auch dann , wenn das bedeutet , unangenehme tatsachen aufzudecken .</seg> ||| the first step to address racial politics is to understand the origin and consequences of racial animosity , even if it means uncovering unpleasant truths .
+<seg grammar="grammar/grammar.out.9.gz" id="9">genau das haben in den usa eine große anzahl an forschungsvorhaben in wirtschaft , soziologie , psychologie und politikwissenschaft geleistet . diese forschungen zeigten , daß menschen unterschiedlicher rasse einander deutlich weniger vertrauen .</seg> ||| this is precisely what a large amount of research in economics , sociology , psychology and political science has done for the us .
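
The updated `in` file inlines each reference translation after its source segment, separated by " ||| ", which is why the standalone `refs` file is deleted in the next hunk. A minimal sketch of splitting such a combined line, assuming this one-pair-per-line layout (hypothetical helper, not dtrain code):

```python
# Minimal sketch: split one combined "source ||| reference" line from the
# updated 'in' / 'work/shard.*.in' files. Hypothetical helper, not dtrain code.
def split_input_line(line):
    source, _, reference = line.rstrip("\n").partition(" ||| ")
    return source, reference  # source keeps the <seg ...>...</seg> markup

with open("training/dtrain/examples/parallelized/in", encoding="utf-8") as f:
    for src, ref in (split_input_line(l) for l in f):
        pass  # e.g. decode src and score the k-best list against ref
```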
diff --git a/training/dtrain/examples/parallelized/refs b/training/dtrain/examples/parallelized/refs
deleted file mode 100644
index 632e27b0..00000000
--- a/training/dtrain/examples/parallelized/refs
+++ /dev/null
@@ -1,10 +0,0 @@
-europe 's divided racial house
-a common feature of europe 's extreme right is its racism and use of the immigration issue as a political wedge .
-the lega nord in italy , the vlaams blok in the netherlands , the supporters of le pen 's national front in france , are all examples of parties or movements formed on the common theme of aversion to immigrants and promotion of simplistic policies to control them .
-while individuals like jorg haidar and jean @-@ marie le pen may come and ( never to soon ) go , the race question will not disappear from european politics anytime soon .
-an aging population at home and ever more open borders imply increasing racial fragmentation in european countries .
-mainstream parties of the center left and center right have confronted this prospect by hiding their heads in the ground , hoping against hope that the problem will disappear .
-it will not , as america 's racial history clearly shows .
-race relations in the us have been for decades - and remain - at the center of political debate , to the point that racial cleavages are as important as income , if not more , as determinants of political preferences and attitudes .
-the first step to address racial politics is to understand the origin and consequences of racial animosity , even if it means uncovering unpleasant truths .
-this is precisely what a large amount of research in economics , sociology , psychology and political science has done for the us .
diff --git a/training/dtrain/examples/parallelized/work/out.0.0 b/training/dtrain/examples/parallelized/work/out.0.0
index c559dd4d..f394a9b0 100644
--- a/training/dtrain/examples/parallelized/work/out.0.0
+++ b/training/dtrain/examples/parallelized/work/out.0.0
@@ -1,15 +1,16 @@
cdec cfg 'cdec.ini'
Loading the LM will be faster if you build a binary file.
-Reading ../standard//nc-wmt11.en.srilm.gz
+Reading ../standard/nc-wmt11.en.srilm.gz
----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
****************************************************************************************************
-Seeding random number sequence to 405292278
+Seeding random number sequence to 2577966319
dtrain
Parameters:
k 100
N 4
T 1
+ batch 0
scorer 'stupid_bleu'
sample from 'kbest'
filter 'uniq'
@@ -22,41 +23,43 @@ Parameters:
pair threshold 0
select weights 'last'
l1 reg 0 'none'
+ pclr no
max pairs 4294967295
+ repeat 1
cdec cfg 'cdec.ini'
- input 'work/shard.0.0.in'
- refs 'work/shard.0.0.refs'
+ input ''
output 'work/weights.0.0'
(a dot represents 10 inputs)
Iteration #1 of 1.
- 5
+ 3
WEIGHTS
- Glue = +0.2663
- WordPenalty = -0.0079042
- LanguageModel = +0.44782
- LanguageModel_OOV = -0.0401
- PhraseModel_0 = -0.193
- PhraseModel_1 = +0.71321
- PhraseModel_2 = +0.85196
- PhraseModel_3 = -0.43986
- PhraseModel_4 = -0.44803
- PhraseModel_5 = -0.0538
- PhraseModel_6 = -0.1788
- PassThrough = -0.1477
+ Glue = -0.0358
+ WordPenalty = +0.099236
+ LanguageModel = +0.51874
+ LanguageModel_OOV = -0.1512
+ PhraseModel_0 = -0.10121
+ PhraseModel_1 = -0.25462
+ PhraseModel_2 = -0.14282
+ PhraseModel_3 = +0.068512
+ PhraseModel_4 = -0.78139
+ PhraseModel_5 = +0
+ PhraseModel_6 = +0.1547
+ PassThrough = -0.075
---
- 1best avg score: 0.17521 (+0.17521)
- 1best avg model score: 21.556 (+21.556)
- avg # pairs: 1671.2
- avg # rank err: 1118.6
- avg # margin viol: 552.6
- non0 feature count: 12
+ 1best avg score: 0.080513 (+0.080513)
+ 1best avg model score: 6.1321 (+6.1321)
+ avg # pairs: 1848.3
+ avg # rank err: 1096.7
+ avg # margin viol: 751.67
+ k-best loss imp: 100%
+ non0 feature count: 11
avg list sz: 100
- avg f count: 11.32
-(time 0.35 min, 4.2 s/S)
+ avg f count: 10.6
+(time 0.23 min, 4.7 s/S)
Writing weights file to 'work/weights.0.0' ...
done
---
-Best iteration: 1 [SCORE 'stupid_bleu'=0.17521].
-This took 0.35 min.
+Best iteration: 1 [SCORE 'stupid_bleu'=0.080513].
+This took 0.23333 min.
diff --git a/training/dtrain/examples/parallelized/work/out.0.1 b/training/dtrain/examples/parallelized/work/out.0.1
index 8bc7ea9c..d0819a5a 100644
--- a/training/dtrain/examples/parallelized/work/out.0.1
+++ b/training/dtrain/examples/parallelized/work/out.0.1
@@ -1,15 +1,16 @@
cdec cfg 'cdec.ini'
Loading the LM will be faster if you build a binary file.
-Reading ../standard//nc-wmt11.en.srilm.gz
+Reading ../standard/nc-wmt11.en.srilm.gz
----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
****************************************************************************************************
-Seeding random number sequence to 43859692
+Seeding random number sequence to 3555678516
dtrain
Parameters:
k 100
N 4
T 1
+ batch 0
scorer 'stupid_bleu'
sample from 'kbest'
filter 'uniq'
@@ -22,42 +23,44 @@ Parameters:
pair threshold 0
select weights 'last'
l1 reg 0 'none'
+ pclr no
max pairs 4294967295
+ repeat 1
cdec cfg 'cdec.ini'
- input 'work/shard.0.0.in'
- refs 'work/shard.0.0.refs'
+ input ''
output 'work/weights.0.1'
weights in 'work/weights.0'
(a dot represents 10 inputs)
Iteration #1 of 1.
- 5
+ 3
WEIGHTS
- Glue = -0.2699
- WordPenalty = +0.080605
- LanguageModel = -0.026572
- LanguageModel_OOV = -0.30025
- PhraseModel_0 = -0.32076
- PhraseModel_1 = +0.67451
- PhraseModel_2 = +0.92
- PhraseModel_3 = -0.36402
- PhraseModel_4 = -0.592
- PhraseModel_5 = -0.0269
- PhraseModel_6 = -0.28755
- PassThrough = -0.33285
+ Glue = +0.19265
+ WordPenalty = +0.0064601
+ LanguageModel = +0.63102
+ LanguageModel_OOV = -0.58027
+ PhraseModel_0 = -0.71998
+ PhraseModel_1 = +0.67713
+ PhraseModel_2 = +1.2848
+ PhraseModel_3 = -0.30726
+ PhraseModel_4 = -0.91479
+ PhraseModel_5 = +0.026825
+ PhraseModel_6 = -0.31892
+ PassThrough = -0.51565
---
- 1best avg score: 0.26638 (+0.26638)
- 1best avg model score: 53.197 (+53.197)
- avg # pairs: 2028.6
- avg # rank err: 998.2
- avg # margin viol: 918.8
+ 1best avg score: 0.12642 (+0.12642)
+ 1best avg model score: -30.689 (-30.689)
+ avg # pairs: 1682.7
+ avg # rank err: 807
+ avg # margin viol: 872
+ k-best loss imp: 100%
non0 feature count: 12
avg list sz: 100
- avg f count: 10.496
-(time 0.35 min, 4.2 s/S)
+ avg f count: 12
+(time 0.27 min, 5.3 s/S)
Writing weights file to 'work/weights.0.1' ...
done
---
-Best iteration: 1 [SCORE 'stupid_bleu'=0.26638].
-This took 0.35 min.
+Best iteration: 1 [SCORE 'stupid_bleu'=0.12642].
+This took 0.26667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.0.2 b/training/dtrain/examples/parallelized/work/out.0.2
new file mode 100644
index 00000000..62bf8bb9
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.0.2
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 2696902705
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.0.2'
+ weights in 'work/weights.1'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 3
+WEIGHTS
+ Glue = -0.2741
+ WordPenalty = +0.1227
+ LanguageModel = +0.82597
+ LanguageModel_OOV = -0.52135
+ PhraseModel_0 = -0.68526
+ PhraseModel_1 = +0.27265
+ PhraseModel_2 = +0.87438
+ PhraseModel_3 = -0.00012234
+ PhraseModel_4 = -1.0912
+ PhraseModel_5 = +0.0371
+ PhraseModel_6 = -0.2855
+ PassThrough = -0.4831
+ ---
+ 1best avg score: 0.12697 (+0.12697)
+ 1best avg model score: -1.7396 (-1.7396)
+ avg # pairs: 1280.3
+ avg # rank err: 764.33
+ avg # margin viol: 507
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 10.727
+(time 0.28 min, 5.7 s/S)
+
+Writing weights file to 'work/weights.0.2' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.12697].
+This took 0.28333 min.
diff --git a/training/dtrain/examples/parallelized/work/out.1.0 b/training/dtrain/examples/parallelized/work/out.1.0
index 65d1e7dc..cc35e676 100644
--- a/training/dtrain/examples/parallelized/work/out.1.0
+++ b/training/dtrain/examples/parallelized/work/out.1.0
@@ -1,15 +1,16 @@
cdec cfg 'cdec.ini'
Loading the LM will be faster if you build a binary file.
-Reading ../standard//nc-wmt11.en.srilm.gz
+Reading ../standard/nc-wmt11.en.srilm.gz
----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
****************************************************************************************************
-Seeding random number sequence to 4126799437
+Seeding random number sequence to 1336015864
dtrain
Parameters:
k 100
N 4
T 1
+ batch 0
scorer 'stupid_bleu'
sample from 'kbest'
filter 'uniq'
@@ -22,41 +23,43 @@ Parameters:
pair threshold 0
select weights 'last'
l1 reg 0 'none'
+ pclr no
max pairs 4294967295
+ repeat 1
cdec cfg 'cdec.ini'
- input 'work/shard.1.0.in'
- refs 'work/shard.1.0.refs'
+ input ''
output 'work/weights.1.0'
(a dot represents 10 inputs)
Iteration #1 of 1.
- 5
+ 3
WEIGHTS
- Glue = -0.3815
- WordPenalty = +0.20064
- LanguageModel = +0.95304
- LanguageModel_OOV = -0.264
- PhraseModel_0 = -0.22362
- PhraseModel_1 = +0.12254
- PhraseModel_2 = +0.26328
- PhraseModel_3 = +0.38018
- PhraseModel_4 = -0.48654
- PhraseModel_5 = +0
- PhraseModel_6 = -0.3645
- PassThrough = -0.2216
+ Glue = -0.2015
+ WordPenalty = +0.078303
+ LanguageModel = +0.90323
+ LanguageModel_OOV = -0.1378
+ PhraseModel_0 = -1.3044
+ PhraseModel_1 = -0.88246
+ PhraseModel_2 = +0.26379
+ PhraseModel_3 = -0.79106
+ PhraseModel_4 = -1.4702
+ PhraseModel_5 = +0.0218
+ PhraseModel_6 = -0.5283
+ PassThrough = -0.2531
---
- 1best avg score: 0.10863 (+0.10863)
- 1best avg model score: -4.9841 (-4.9841)
- avg # pairs: 1345.4
- avg # rank err: 822.4
- avg # margin viol: 501
- non0 feature count: 11
+ 1best avg score: 0.062351 (+0.062351)
+ 1best avg model score: -47.109 (-47.109)
+ avg # pairs: 1284
+ avg # rank err: 844.33
+ avg # margin viol: 216.33
+ k-best loss imp: 100%
+ non0 feature count: 12
avg list sz: 100
- avg f count: 11.814
-(time 0.43 min, 5.2 s/S)
+ avg f count: 11.883
+(time 0.42 min, 8.3 s/S)
Writing weights file to 'work/weights.1.0' ...
done
---
-Best iteration: 1 [SCORE 'stupid_bleu'=0.10863].
-This took 0.43333 min.
+Best iteration: 1 [SCORE 'stupid_bleu'=0.062351].
+This took 0.41667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.1.1 b/training/dtrain/examples/parallelized/work/out.1.1
index f479fbbc..3d7a7e66 100644
--- a/training/dtrain/examples/parallelized/work/out.1.1
+++ b/training/dtrain/examples/parallelized/work/out.1.1
@@ -1,15 +1,16 @@
cdec cfg 'cdec.ini'
Loading the LM will be faster if you build a binary file.
-Reading ../standard//nc-wmt11.en.srilm.gz
+Reading ../standard/nc-wmt11.en.srilm.gz
----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
****************************************************************************************************
-Seeding random number sequence to 2112412848
+Seeding random number sequence to 1673913538
dtrain
Parameters:
k 100
N 4
T 1
+ batch 0
scorer 'stupid_bleu'
sample from 'kbest'
filter 'uniq'
@@ -22,42 +23,44 @@ Parameters:
pair threshold 0
select weights 'last'
l1 reg 0 'none'
+ pclr no
max pairs 4294967295
+ repeat 1
cdec cfg 'cdec.ini'
- input 'work/shard.1.0.in'
- refs 'work/shard.1.0.refs'
+ input ''
output 'work/weights.1.1'
weights in 'work/weights.0'
(a dot represents 10 inputs)
Iteration #1 of 1.
- 5
+ 3
WEIGHTS
- Glue = -0.3178
- WordPenalty = +0.11092
- LanguageModel = +0.17269
- LanguageModel_OOV = -0.13485
- PhraseModel_0 = -0.45371
- PhraseModel_1 = +0.38789
- PhraseModel_2 = +0.75311
- PhraseModel_3 = -0.38163
- PhraseModel_4 = -0.58817
- PhraseModel_5 = -0.0269
- PhraseModel_6 = -0.27315
- PassThrough = -0.16745
+ Glue = -0.15575
+ WordPenalty = +0.14939
+ LanguageModel = +0.95915
+ LanguageModel_OOV = -0.42267
+ PhraseModel_0 = -0.46337
+ PhraseModel_1 = +0.36682
+ PhraseModel_2 = +0.79339
+ PhraseModel_3 = +0.27497
+ PhraseModel_4 = -1.2038
+ PhraseModel_5 = +0.061325
+ PhraseModel_6 = -0.11143
+ PassThrough = -0.45405
---
- 1best avg score: 0.13169 (+0.13169)
- 1best avg model score: 24.226 (+24.226)
- avg # pairs: 1951.2
- avg # rank err: 985.4
- avg # margin viol: 951
+ 1best avg score: 0.057772 (+0.057772)
+ 1best avg model score: -59.945 (-59.945)
+ avg # pairs: 1647
+ avg # rank err: 878
+ avg # margin viol: 564.67
+ k-best loss imp: 100%
non0 feature count: 12
avg list sz: 100
- avg f count: 11.224
-(time 0.45 min, 5.4 s/S)
+ avg f count: 11.973
+(time 0.42 min, 8.3 s/S)
Writing weights file to 'work/weights.1.1' ...
done
---
-Best iteration: 1 [SCORE 'stupid_bleu'=0.13169].
-This took 0.45 min.
+Best iteration: 1 [SCORE 'stupid_bleu'=0.057772].
+This took 0.41667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.1.2 b/training/dtrain/examples/parallelized/work/out.1.2
new file mode 100644
index 00000000..ba603651
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.1.2
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 785956183
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.1.2'
+ weights in 'work/weights.1'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 3
+WEIGHTS
+ Glue = -0.2323
+ WordPenalty = +0.11501
+ LanguageModel = +0.76484
+ LanguageModel_OOV = -0.57495
+ PhraseModel_0 = -0.64111
+ PhraseModel_1 = +0.44772
+ PhraseModel_2 = +0.98529
+ PhraseModel_3 = +0.022939
+ PhraseModel_4 = -1.1029
+ PhraseModel_5 = +0.0491
+ PhraseModel_6 = -0.315
+ PassThrough = -0.5367
+ ---
+ 1best avg score: 0.24871 (+0.24871)
+ 1best avg model score: -3.0138 (-3.0138)
+ avg # pairs: 1489.7
+ avg # rank err: 644.67
+ avg # margin viol: 549
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 11.187
+(time 0.43 min, 8.7 s/S)
+
+Writing weights file to 'work/weights.1.2' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.24871].
+This took 0.43333 min.
diff --git a/training/dtrain/examples/parallelized/work/out.2.0 b/training/dtrain/examples/parallelized/work/out.2.0
new file mode 100644
index 00000000..ab38c637
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.2.0
@@ -0,0 +1,65 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 3274281797
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.2.0'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 3
+WEIGHTS
+ Glue = +0.1295
+ WordPenalty = +0.12781
+ LanguageModel = +1.1825
+ LanguageModel_OOV = -0.1667
+ PhraseModel_0 = -0.65167
+ PhraseModel_1 = -0.044563
+ PhraseModel_2 = +0.49706
+ PhraseModel_3 = -0.40367
+ PhraseModel_4 = -1.3438
+ PhraseModel_5 = +0.0435
+ PhraseModel_6 = -0.3743
+ PassThrough = -0.0307
+ ---
+ 1best avg score: 0.08637 (+0.08637)
+ 1best avg model score: -42.175 (-42.175)
+ avg # pairs: 1136.3
+ avg # rank err: 720.67
+ avg # margin viol: 399.67
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 11.487
+(time 0.22 min, 4.3 s/S)
+
+Writing weights file to 'work/weights.2.0' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.08637].
+This took 0.21667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.2.1 b/training/dtrain/examples/parallelized/work/out.2.1
new file mode 100644
index 00000000..f86ec520
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.2.1
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 3424877412
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.2.1'
+ weights in 'work/weights.0'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 3
+WEIGHTS
+ Glue = -0.33455
+ WordPenalty = +0.10696
+ LanguageModel = +1.0621
+ LanguageModel_OOV = -0.46617
+ PhraseModel_0 = -0.63382
+ PhraseModel_1 = +0.33225
+ PhraseModel_2 = +0.8501
+ PhraseModel_3 = -0.29374
+ PhraseModel_4 = -1.0908
+ PhraseModel_5 = +0.033425
+ PhraseModel_6 = -0.38922
+ PassThrough = -0.36385
+ ---
+ 1best avg score: 0.12089 (+0.12089)
+ 1best avg model score: -30.902 (-30.902)
+ avg # pairs: 1852
+ avg # rank err: 870.33
+ avg # margin viol: 898.67
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 12
+(time 0.22 min, 4.3 s/S)
+
+Writing weights file to 'work/weights.2.1' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.12089].
+This took 0.21667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.2.2 b/training/dtrain/examples/parallelized/work/out.2.2
new file mode 100644
index 00000000..823129c0
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.2.2
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 3087490723
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.2.2'
+ weights in 'work/weights.1'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 3
+WEIGHTS
+ Glue = -0.3464
+ WordPenalty = +0.18737
+ LanguageModel = +1.5794
+ LanguageModel_OOV = -0.48725
+ PhraseModel_0 = -1.0015
+ PhraseModel_1 = -0.51734
+ PhraseModel_2 = +0.40486
+ PhraseModel_3 = -0.013031
+ PhraseModel_4 = -1.1546
+ PhraseModel_5 = +0.0371
+ PhraseModel_6 = -0.1892
+ PassThrough = -0.449
+ ---
+ 1best avg score: 0.17557 (+0.17557)
+ 1best avg model score: -15.133 (-15.133)
+ avg # pairs: 1644.7
+ avg # rank err: 830.33
+ avg # margin viol: 766.33
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 11.267
+(time 0.23 min, 4.7 s/S)
+
+Writing weights file to 'work/weights.2.2' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.17557].
+This took 0.23333 min.
diff --git a/training/dtrain/examples/parallelized/work/out.3.0 b/training/dtrain/examples/parallelized/work/out.3.0
new file mode 100644
index 00000000..2d8dea27
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.3.0
@@ -0,0 +1,65 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 164953210
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.3.0'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 1
+WEIGHTS
+ Glue = -0.11
+ WordPenalty = +0.21975
+ LanguageModel = +1.7397
+ LanguageModel_OOV = -0.037
+ PhraseModel_0 = -0.34702
+ PhraseModel_1 = +0.11602
+ PhraseModel_2 = +0.3951
+ PhraseModel_3 = +0.37857
+ PhraseModel_4 = -1.0319
+ PhraseModel_5 = +0.042
+ PhraseModel_6 = -0.253
+ PassThrough = -0.111
+ ---
+ 1best avg score: 0.034204 (+0.034204)
+ 1best avg model score: 0 (+0)
+ avg # pairs: 900
+ avg # rank err: 900
+ avg # margin viol: 0
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 10.8
+(time 0.12 min, 7 s/S)
+
+Writing weights file to 'work/weights.3.0' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.034204].
+This took 0.11667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.3.1 b/training/dtrain/examples/parallelized/work/out.3.1
new file mode 100644
index 00000000..a1eeb64b
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.3.1
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 2079701870
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.3.1'
+ weights in 'work/weights.0'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 1
+WEIGHTS
+ Glue = -0.63235
+ WordPenalty = +0.10761
+ LanguageModel = +1.4703
+ LanguageModel_OOV = -0.45548
+ PhraseModel_0 = -0.34858
+ PhraseModel_1 = +0.050651
+ PhraseModel_2 = +0.32137
+ PhraseModel_3 = +0.31848
+ PhraseModel_4 = -0.96702
+ PhraseModel_5 = +0.026825
+ PhraseModel_6 = -0.30802
+ PassThrough = -0.43805
+ ---
+ 1best avg score: 0.078383 (+0.078383)
+ 1best avg model score: -68.182 (-68.182)
+ avg # pairs: 1411
+ avg # rank err: 599
+ avg # margin viol: 801
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 12
+(time 0.12 min, 7 s/S)
+
+Writing weights file to 'work/weights.3.1' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.078383].
+This took 0.11667 min.
diff --git a/training/dtrain/examples/parallelized/work/out.3.2 b/training/dtrain/examples/parallelized/work/out.3.2
new file mode 100644
index 00000000..a0c0e509
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/out.3.2
@@ -0,0 +1,66 @@
+ cdec cfg 'cdec.ini'
+Loading the LM will be faster if you build a binary file.
+Reading ../standard/nc-wmt11.en.srilm.gz
+----5---10---15---20---25---30---35---40---45---50---55---60---65---70---75---80---85---90---95--100
+****************************************************************************************************
+Seeding random number sequence to 3524794953
+
+dtrain
+Parameters:
+ k 100
+ N 4
+ T 1
+ batch 0
+ scorer 'stupid_bleu'
+ sample from 'kbest'
+ filter 'uniq'
+ learning rate 0.0001
+ gamma 0
+ loss margin 1
+ faster perceptron 0
+ pairs 'XYX'
+ hi lo 0.1
+ pair threshold 0
+ select weights 'last'
+ l1 reg 0 'none'
+ pclr no
+ max pairs 4294967295
+ repeat 1
+ cdec cfg 'cdec.ini'
+ input ''
+ output 'work/weights.3.2'
+ weights in 'work/weights.1'
+(a dot represents 10 inputs)
+Iteration #1 of 1.
+ 1
+WEIGHTS
+ Glue = -0.2581
+ WordPenalty = +0.091647
+ LanguageModel = +0.77537
+ LanguageModel_OOV = -0.57165
+ PhraseModel_0 = -0.5794
+ PhraseModel_1 = +0.46929
+ PhraseModel_2 = +0.95471
+ PhraseModel_3 = +0.12107
+ PhraseModel_4 = -1.0053
+ PhraseModel_5 = +0.0371
+ PhraseModel_6 = -0.3253
+ PassThrough = -0.5334
+ ---
+ 1best avg score: 0.10945 (+0.10945)
+ 1best avg model score: -23.077 (-23.077)
+ avg # pairs: 1545
+ avg # rank err: 987
+ avg # margin viol: 558
+ k-best loss imp: 100%
+ non0 feature count: 12
+ avg list sz: 100
+ avg f count: 12
+(time 0.12 min, 7 s/S)
+
+Writing weights file to 'work/weights.3.2' ...
+done
+
+---
+Best iteration: 1 [SCORE 'stupid_bleu'=0.10945].
+This took 0.11667 min.
diff --git a/training/dtrain/examples/parallelized/work/shard.0.0.in b/training/dtrain/examples/parallelized/work/shard.0.0.in
index 92f9c78e..fb8c2cd6 100644
--- a/training/dtrain/examples/parallelized/work/shard.0.0.in
+++ b/training/dtrain/examples/parallelized/work/shard.0.0.in
@@ -1,5 +1,3 @@
-<seg grammar="grammar/grammar.out.0.gz" id="0">europas nach rassen geteiltes haus</seg>
-<seg grammar="grammar/grammar.out.1.gz" id="1">ein gemeinsames merkmal aller extremen rechten in europa ist ihr rassismus und die tatsache , daß sie das einwanderungsproblem als politischen hebel benutzen .</seg>
-<seg grammar="grammar/grammar.out.2.gz" id="2">der lega nord in italien , der vlaams block in den niederlanden , die anhänger von le pens nationaler front in frankreich , sind beispiele für parteien oder bewegungen , die sich um das gemeinsame thema : ablehnung der zuwanderung gebildet haben und um forderung nach einer vereinfachten politik , um sie zu regeln .</seg>
-<seg grammar="grammar/grammar.out.3.gz" id="3">während individuen wie jörg haidar und jean @-@ marie le pen kommen und ( leider nicht zu bald ) wieder gehen mögen , wird die rassenfrage aus der europäischer politik nicht so bald verschwinden .</seg>
-<seg grammar="grammar/grammar.out.4.gz" id="4">eine alternde einheimische bevölkerung und immer offenere grenzen vermehren die rassistische zersplitterung in den europäischen ländern .</seg>
+<seg grammar="grammar/grammar.out.1.gz" id="1">ein gemeinsames merkmal aller extremen rechten in europa ist ihr rassismus und die tatsache , daß sie das einwanderungsproblem als politischen hebel benutzen .</seg> ||| a common feature of europe 's extreme right is its racism and use of the immigration issue as a political wedge .
+<seg grammar="grammar/grammar.out.6.gz" id="6">das aber wird es nicht , wie die geschichte des rassismus in amerika deutlich zeigt .</seg> ||| it will not , as america 's racial history clearly shows .
+<seg grammar="grammar/grammar.out.5.gz" id="5">die großen parteien der rechten und der linken mitte haben sich dem problem gestellt , in dem sie den kopf in den sand gesteckt und allen aussichten zuwider gehofft haben , es möge bald verschwinden .</seg> ||| mainstream parties of the center left and center right have confronted this prospect by hiding their heads in the ground , hoping against hope that the problem will disappear .
diff --git a/training/dtrain/examples/parallelized/work/shard.0.0.refs b/training/dtrain/examples/parallelized/work/shard.0.0.refs
deleted file mode 100644
index bef68fee..00000000
--- a/training/dtrain/examples/parallelized/work/shard.0.0.refs
+++ /dev/null
@@ -1,5 +0,0 @@
-europe 's divided racial house
-a common feature of europe 's extreme right is its racism and use of the immigration issue as a political wedge .
-the lega nord in italy , the vlaams blok in the netherlands , the supporters of le pen 's national front in france , are all examples of parties or movements formed on the common theme of aversion to immigrants and promotion of simplistic policies to control them .
-while individuals like jorg haidar and jean @-@ marie le pen may come and ( never to soon ) go , the race question will not disappear from european politics anytime soon .
-an aging population at home and ever more open borders imply increasing racial fragmentation in european countries .
diff --git a/training/dtrain/examples/parallelized/work/shard.1.0.in b/training/dtrain/examples/parallelized/work/shard.1.0.in
index b7695ce7..c28d1502 100644
--- a/training/dtrain/examples/parallelized/work/shard.1.0.in
+++ b/training/dtrain/examples/parallelized/work/shard.1.0.in
@@ -1,5 +1,3 @@
-<seg grammar="grammar/grammar.out.5.gz" id="5">die großen parteien der rechten und der linken mitte haben sich dem problem gestellt , in dem sie den kopf in den sand gesteckt und allen aussichten zuwider gehofft haben , es möge bald verschwinden .</seg>
-<seg grammar="grammar/grammar.out.6.gz" id="6">das aber wird es nicht , wie die geschichte des rassismus in amerika deutlich zeigt .</seg>
-<seg grammar="grammar/grammar.out.7.gz" id="7">die beziehungen zwischen den rassen standen in den usa über jahrzehnte - und tun das noch heute - im zentrum der politischen debatte . das ging so weit , daß rassentrennung genauso wichtig wie das einkommen wurde , - wenn nicht sogar noch wichtiger - um politische zuneigungen und einstellungen zu bestimmen .</seg>
-<seg grammar="grammar/grammar.out.8.gz" id="8">der erste schritt , um mit der rassenfrage umzugehen ist , ursache und folgen rassistischer feindseligkeiten zu verstehen , auch dann , wenn das bedeutet , unangenehme tatsachen aufzudecken .</seg>
-<seg grammar="grammar/grammar.out.9.gz" id="9">genau das haben in den usa eine große anzahl an forschungsvorhaben in wirtschaft , soziologie , psychologie und politikwissenschaft geleistet . diese forschungen zeigten , daß menschen unterschiedlicher rasse einander deutlich weniger vertrauen .</seg>
+<seg grammar="grammar/grammar.out.7.gz" id="7">die beziehungen zwischen den rassen standen in den usa über jahrzehnte - und tun das noch heute - im zentrum der politischen debatte . das ging so weit , daß rassentrennung genauso wichtig wie das einkommen wurde , - wenn nicht sogar noch wichtiger - um politische zuneigungen und einstellungen zu bestimmen .</seg> ||| race relations in the us have been for decades - and remain - at the center of political debate , to the point that racial cleavages are as important as income , if not more , as determinants of political preferences and attitudes .
+<seg grammar="grammar/grammar.out.0.gz" id="0">europas nach rassen geteiltes haus</seg> ||| europe 's divided racial house
+<seg grammar="grammar/grammar.out.2.gz" id="2">der lega nord in italien , der vlaams block in den niederlanden , die anhänger von le pens nationaler front in frankreich , sind beispiele für parteien oder bewegungen , die sich um das gemeinsame thema : ablehnung der zuwanderung gebildet haben und um forderung nach einer vereinfachten politik , um sie zu regeln .</seg> ||| the lega nord in italy , the vlaams blok in the netherlands , the supporters of le pen 's national front in france , are all examples of parties or movements formed on the common theme of aversion to immigrants and promotion of simplistic policies to control them .
diff --git a/training/dtrain/examples/parallelized/work/shard.1.0.refs b/training/dtrain/examples/parallelized/work/shard.1.0.refs
deleted file mode 100644
index 6076f6d5..00000000
--- a/training/dtrain/examples/parallelized/work/shard.1.0.refs
+++ /dev/null
@@ -1,5 +0,0 @@
-mainstream parties of the center left and center right have confronted this prospect by hiding their heads in the ground , hoping against hope that the problem will disappear .
-it will not , as america 's racial history clearly shows .
-race relations in the us have been for decades - and remain - at the center of political debate , to the point that racial cleavages are as important as income , if not more , as determinants of political preferences and attitudes .
-the first step to address racial politics is to understand the origin and consequences of racial animosity , even if it means uncovering unpleasant truths .
-this is precisely what a large amount of research in economics , sociology , psychology and political science has done for the us .
diff --git a/training/dtrain/examples/parallelized/work/shard.2.0.in b/training/dtrain/examples/parallelized/work/shard.2.0.in
new file mode 100644
index 00000000..85f68e20
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/shard.2.0.in
@@ -0,0 +1,3 @@
+<seg grammar="grammar/grammar.out.4.gz" id="4">eine alternde einheimische bevölkerung und immer offenere grenzen vermehren die rassistische zersplitterung in den europäischen ländern .</seg> ||| an aging population at home and ever more open borders imply increasing racial fragmentation in european countries .
+<seg grammar="grammar/grammar.out.3.gz" id="3">während individuen wie jörg haidar und jean @-@ marie le pen kommen und ( leider nicht zu bald ) wieder gehen mögen , wird die rassenfrage aus der europäischer politik nicht so bald verschwinden .</seg> ||| while individuals like jorg haidar and jean @-@ marie le pen may come and ( never to soon ) go , the race question will not disappear from european politics anytime soon .
+<seg grammar="grammar/grammar.out.8.gz" id="8">der erste schritt , um mit der rassenfrage umzugehen ist , ursache und folgen rassistischer feindseligkeiten zu verstehen , auch dann , wenn das bedeutet , unangenehme tatsachen aufzudecken .</seg> ||| the first step to address racial politics is to understand the origin and consequences of racial animosity , even if it means uncovering unpleasant truths .
diff --git a/training/dtrain/examples/parallelized/work/shard.3.0.in b/training/dtrain/examples/parallelized/work/shard.3.0.in
new file mode 100644
index 00000000..f7cbb3e3
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/shard.3.0.in
@@ -0,0 +1 @@
+<seg grammar="grammar/grammar.out.9.gz" id="9">genau das haben in den usa eine große anzahl an forschungsvorhaben in wirtschaft , soziologie , psychologie und politikwissenschaft geleistet . diese forschungen zeigten , daß menschen unterschiedlicher rasse einander deutlich weniger vertrauen .</seg> ||| this is precisely what a large amount of research in economics , sociology , psychology and political science has done for the us .
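
The four shard files above hold the same ten sentence pairs as `in`, shuffled and cut into chunks of three, with the last shard receiving the single leftover line. A rough sketch of such a shuffle-then-chunk split, under the assumption that this is how the shards were produced (the actual driver is not part of this diff):

```python
import math
import random

def make_shards(lines, num_shards, seed=None):
    """Shuffle the corpus and cut it into num_shards contiguous chunks,
    matching the layout of work/shard.*.0.in (assumed scheme)."""
    lines = list(lines)
    random.Random(seed).shuffle(lines)
    per_shard = math.ceil(len(lines) / num_shards)
    return [lines[i * per_shard:(i + 1) * per_shard] for i in range(num_shards)]

with open("in", encoding="utf-8") as f:
    shards = make_shards(f, num_shards=4)        # 10 lines -> sizes 3, 3, 3, 1
for i, shard in enumerate(shards):
    with open(f"work/shard.{i}.0.in", "w", encoding="utf-8") as out:
        out.writelines(shard)
```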
diff --git a/training/dtrain/examples/parallelized/work/weights.0 b/training/dtrain/examples/parallelized/work/weights.0
index ddd595a8..aa494afb 100644
--- a/training/dtrain/examples/parallelized/work/weights.0
+++ b/training/dtrain/examples/parallelized/work/weights.0
@@ -1,12 +1,12 @@
-LanguageModel 0.7004298992212881
-PhraseModel_2 0.5576194336478857
-PhraseModel_1 0.41787318415343155
-PhraseModel_4 -0.46728502545635164
-PhraseModel_3 -0.029839521598455515
-Glue -0.05760000000000068
-PhraseModel_6 -0.2716499999999978
-PhraseModel_0 -0.20831031065605327
-LanguageModel_OOV -0.15205000000000077
-PassThrough -0.1846500000000006
-WordPenalty 0.09636994553433414
-PhraseModel_5 -0.026900000000000257
+PhraseModel_4 -1.1568444011426948
+LanguageModel 1.0860459962466693
+PhraseModel_0 -0.6010837860294569
+PhraseModel_3 -0.18690910705225725
+PhraseModel_1 -0.26640412994377044
+PhraseModel_6 -0.25022499999999803
+PhraseModel_2 0.2532838373219909
+PassThrough -0.1174500000000002
+WordPenalty 0.1312763645173042
+LanguageModel_OOV -0.12317500000000006
+Glue -0.05444999999999971
+PhraseModel_5 0.026825000000000078
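
The merged `work/weights.N` files are consistent with a plain per-feature average of the four per-shard `weights.*.N` files, with a feature missing from a shard counted as 0. For example, the iteration-0 PhraseModel_4 values reported in out.0.0 through out.3.0 average to (-0.78139 - 1.4702 - 1.3438 - 1.0319) / 4 ≈ -1.1568, and the LanguageModel values to (0.51874 + 0.90323 + 1.1825 + 1.7397) / 4 ≈ 1.0860, matching the first two entries of weights.0 above.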
diff --git a/training/dtrain/examples/parallelized/work/weights.0.0 b/training/dtrain/examples/parallelized/work/weights.0.0
index c9370b18..541321af 100644
--- a/training/dtrain/examples/parallelized/work/weights.0.0
+++ b/training/dtrain/examples/parallelized/work/weights.0.0
@@ -1,12 +1,11 @@
-WordPenalty -0.0079041595706392243
-LanguageModel 0.44781580828279532
-LanguageModel_OOV -0.04010000000000042
-Glue 0.26629999999999948
-PhraseModel_0 -0.19299677809125185
-PhraseModel_1 0.71321026861732773
-PhraseModel_2 0.85195540993310537
-PhraseModel_3 -0.43986310822842656
-PhraseModel_4 -0.44802855630415955
-PhraseModel_5 -0.053800000000000514
-PhraseModel_6 -0.17879999999999835
-PassThrough -0.14770000000000036
+LanguageModel_OOV -0.15119999999999936
+PassThrough -0.075000000000000872
+Glue -0.035799999999999721
+PhraseModel_1 -0.25461850237866285
+WordPenalty 0.099236289114895807
+PhraseModel_0 -0.101213892033636
+PhraseModel_2 -0.14281771543359051
+PhraseModel_3 0.068512482804492139
+PhraseModel_4 -0.78138944075452532
+PhraseModel_6 0.15469999999999931
+LanguageModel 0.51873837981298221
diff --git a/training/dtrain/examples/parallelized/work/weights.0.1 b/training/dtrain/examples/parallelized/work/weights.0.1
index 8fad3de8..c983747e 100644
--- a/training/dtrain/examples/parallelized/work/weights.0.1
+++ b/training/dtrain/examples/parallelized/work/weights.0.1
@@ -1,12 +1,12 @@
-WordPenalty 0.080605055841244472
-LanguageModel -0.026571720531022844
-LanguageModel_OOV -0.30024999999999141
-Glue -0.26989999999999842
-PhraseModel_2 0.92000295209089566
-PhraseModel_1 0.67450748692470841
-PhraseModel_4 -0.5920000014976784
-PhraseModel_3 -0.36402437203127397
-PhraseModel_6 -0.28754999999999603
-PhraseModel_0 -0.32076244202907672
-PassThrough -0.33284999999999004
-PhraseModel_5 -0.026900000000000257
+PassThrough -0.51564999999999106
+Glue 0.19265000000000118
+WordPenalty 0.0064601304183101293
+LanguageModel 0.63101690103206198
+LanguageModel_OOV -0.58027499999998244
+PhraseModel_0 -0.7199776484358319
+PhraseModel_1 0.67713208716270057
+PhraseModel_2 1.2847869050798759
+PhraseModel_3 -0.30726076030314797
+PhraseModel_4 -0.9147907962255597
+PhraseModel_5 0.026825000000000078
+PhraseModel_6 -0.31892499999999002
diff --git a/training/dtrain/examples/parallelized/work/weights.0.2 b/training/dtrain/examples/parallelized/work/weights.0.2
new file mode 100644
index 00000000..86795230
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.0.2
@@ -0,0 +1,12 @@
+PassThrough -0.48309999999998859
+Glue -0.27409999999999729
+WordPenalty 0.12269904849971774
+LanguageModel 0.82596659132167016
+LanguageModel_OOV -0.5213499999999861
+PhraseModel_0 -0.68525899286050596
+PhraseModel_1 0.27265146052517253
+PhraseModel_2 0.87438450673072043
+PhraseModel_3 -0.00012233626643227101
+PhraseModel_4 -1.0911805651205244
+PhraseModel_5 0.037100000000000292
+PhraseModel_6 -0.28549999999999121
diff --git a/training/dtrain/examples/parallelized/work/weights.1 b/training/dtrain/examples/parallelized/work/weights.1
index 03058a16..520b575e 100644
--- a/training/dtrain/examples/parallelized/work/weights.1
+++ b/training/dtrain/examples/parallelized/work/weights.1
@@ -1,12 +1,12 @@
-PhraseModel_2 0.8365578543552836
-PhraseModel_4 -0.5900840266009169
-PhraseModel_1 0.5312000609786991
-PhraseModel_0 -0.3872342271319619
-PhraseModel_3 -0.3728279676912084
-Glue -0.2938500000000036
-PhraseModel_6 -0.2803499999999967
-PassThrough -0.25014999999999626
-LanguageModel_OOV -0.21754999999999702
-LanguageModel 0.07306061161169894
-WordPenalty 0.09576193325966899
-PhraseModel_5 -0.026900000000000257
+LanguageModel 1.0306413574382605
+PhraseModel_4 -1.0441183310270499
+PhraseModel_2 0.8124104300969892
+PhraseModel_0 -0.5414354190041899
+LanguageModel_OOV -0.48114999999999053
+PassThrough -0.442899999999993
+PhraseModel_1 0.3567134472577971
+Glue -0.2324999999999999
+PhraseModel_6 -0.2818999999999916
+PhraseModel_3 -0.001886958694580998
+WordPenalty 0.09260244090382065
+PhraseModel_5 0.03710000000000029
diff --git a/training/dtrain/examples/parallelized/work/weights.1.0 b/training/dtrain/examples/parallelized/work/weights.1.0
index 6a6a65c1..68f4eaf2 100644
--- a/training/dtrain/examples/parallelized/work/weights.1.0
+++ b/training/dtrain/examples/parallelized/work/weights.1.0
@@ -1,11 +1,12 @@
-WordPenalty 0.20064405063930751
-LanguageModel 0.9530439901597807
-LanguageModel_OOV -0.26400000000000112
-Glue -0.38150000000000084
-PhraseModel_0 -0.22362384322085468
-PhraseModel_1 0.12253609968953538
-PhraseModel_2 0.26328345736266612
-PhraseModel_3 0.38018406503151553
-PhraseModel_4 -0.48654149460854373
-PhraseModel_6 -0.36449999999999722
-PassThrough -0.22160000000000085
+PhraseModel_4 -1.4702479045005545
+PhraseModel_3 -0.79105519577534078
+PhraseModel_6 -0.52829999999999666
+PhraseModel_5 0.021799999999999924
+LanguageModel 0.90323355461358656
+PhraseModel_2 0.26378844109522476
+PassThrough -0.25310000000000021
+Glue -0.20149999999999982
+PhraseModel_1 -0.88245610760574056
+WordPenalty 0.078303295087152405
+PhraseModel_0 -1.3044311246859424
+LanguageModel_OOV -0.13780000000000128
diff --git a/training/dtrain/examples/parallelized/work/weights.1.1 b/training/dtrain/examples/parallelized/work/weights.1.1
index f56ea4a2..02926c54 100644
--- a/training/dtrain/examples/parallelized/work/weights.1.1
+++ b/training/dtrain/examples/parallelized/work/weights.1.1
@@ -1,12 +1,12 @@
-WordPenalty 0.1109188106780935
-LanguageModel 0.17269294375442074
-LanguageModel_OOV -0.13485000000000266
-Glue -0.3178000000000088
-PhraseModel_2 0.75311275661967159
-PhraseModel_1 0.38789263503268989
-PhraseModel_4 -0.58816805170415531
-PhraseModel_3 -0.38163156335114284
-PhraseModel_6 -0.27314999999999739
-PhraseModel_0 -0.45370601223484697
-PassThrough -0.16745000000000249
-PhraseModel_5 -0.026900000000000257
+PassThrough -0.45404999999998186
+Glue -0.15574999999999967
+WordPenalty 0.14938644441267146
+LanguageModel 0.95914771145227362
+LanguageModel_OOV -0.42267499999998259
+PhraseModel_0 -0.4633667196239511
+PhraseModel_1 0.36681570131202201
+PhraseModel_2 0.7933894810149833
+PhraseModel_3 0.27497076611523918
+PhraseModel_4 -1.2038459762138427
+PhraseModel_5 0.061325000000000914
+PhraseModel_6 -0.11142500000000027
diff --git a/training/dtrain/examples/parallelized/work/weights.1.2 b/training/dtrain/examples/parallelized/work/weights.1.2
new file mode 100644
index 00000000..79a104b3
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.1.2
@@ -0,0 +1,12 @@
+PassThrough -0.53669999999998386
+Glue -0.23230000000000336
+WordPenalty 0.1150120361700277
+LanguageModel 0.76483587762340066
+LanguageModel_OOV -0.57494999999998042
+PhraseModel_0 -0.64110548780098009
+PhraseModel_1 0.44772095653729937
+PhraseModel_2 0.98529136452571298
+PhraseModel_3 0.022939428768845804
+PhraseModel_4 -1.1028511897295128
+PhraseModel_5 0.049100000000000636
+PhraseModel_6 -0.31499999999998796
diff --git a/training/dtrain/examples/parallelized/work/weights.2 b/training/dtrain/examples/parallelized/work/weights.2
new file mode 100644
index 00000000..9c7f5f2a
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.2
@@ -0,0 +1,12 @@
+PhraseModel_4 -1.0884784363200164
+LanguageModel 0.9863954661653327
+PhraseModel_2 0.8048100209655031
+PhraseModel_0 -0.7268058343336511
+LanguageModel_OOV -0.5387999999999846
+PassThrough -0.5005499999999877
+PhraseModel_1 0.16807904188863734
+PhraseModel_6 -0.2787499999999906
+Glue -0.2777249999999977
+WordPenalty 0.12918089364212418
+PhraseModel_3 0.03271485277712574
+PhraseModel_5 0.04010000000000038
diff --git a/training/dtrain/examples/parallelized/work/weights.2.0 b/training/dtrain/examples/parallelized/work/weights.2.0
new file mode 100644
index 00000000..7c7e097d
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.2.0
@@ -0,0 +1,12 @@
+LanguageModel_OOV -0.16669999999999968
+PassThrough -0.030699999999999096
+PhraseModel_5 0.043500000000000219
+PhraseModel_6 -0.37429999999999497
+LanguageModel 1.1825232395261447
+PhraseModel_3 -0.40366624719458399
+PhraseModel_4 -1.3438482384390973
+Glue 0.12950000000000114
+PhraseModel_1 -0.044563165462829533
+WordPenalty 0.12781286602412198
+PhraseModel_0 -0.65166852874668157
+PhraseModel_2 0.49706380871834238
diff --git a/training/dtrain/examples/parallelized/work/weights.2.1 b/training/dtrain/examples/parallelized/work/weights.2.1
new file mode 100644
index 00000000..11714ec1
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.2.1
@@ -0,0 +1,12 @@
+PassThrough -0.36384999999999734
+Glue -0.33455000000000329
+WordPenalty 0.10695587353072468
+LanguageModel 1.0621291481802193
+LanguageModel_OOV -0.46617499999999584
+PhraseModel_0 -0.63382056132769171
+PhraseModel_1 0.33225469649984996
+PhraseModel_2 0.85009991348010649
+PhraseModel_3 -0.29374143412758763
+PhraseModel_4 -1.0908181449386518
+PhraseModel_5 0.033425000000000114
+PhraseModel_6 -0.38922499999998272
diff --git a/training/dtrain/examples/parallelized/work/weights.2.2 b/training/dtrain/examples/parallelized/work/weights.2.2
new file mode 100644
index 00000000..4651c771
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.2.2
@@ -0,0 +1,12 @@
+PassThrough -0.44899999999999302
+Glue -0.34639999999999227
+WordPenalty 0.18736549685511736
+LanguageModel 1.579413019617276
+LanguageModel_OOV -0.48724999999999041
+PhraseModel_0 -1.0014593871340565
+PhraseModel_1 -0.5173431118302918
+PhraseModel_2 0.40485682070199475
+PhraseModel_3 -0.013031148291449997
+PhraseModel_4 -1.1546267627331184
+PhraseModel_5 0.037100000000000292
+PhraseModel_6 -0.18919999999999634
diff --git a/training/dtrain/examples/parallelized/work/weights.3.0 b/training/dtrain/examples/parallelized/work/weights.3.0
new file mode 100644
index 00000000..37bd01a2
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.3.0
@@ -0,0 +1,12 @@
+LanguageModel_OOV -0.036999999999999908
+PassThrough -0.11100000000000057
+Glue -0.11000000000000044
+PhraseModel_1 0.11602125567215119
+WordPenalty 0.2197530078430466
+PhraseModel_0 -0.34702159865156773
+PhraseModel_2 0.39510081490798676
+PhraseModel_3 0.37857253195640361
+PhraseModel_4 -1.0318920208766025
+PhraseModel_5 0.042000000000000176
+PhraseModel_6 -0.25299999999999973
+LanguageModel 1.7396888110339634
diff --git a/training/dtrain/examples/parallelized/work/weights.3.1 b/training/dtrain/examples/parallelized/work/weights.3.1
new file mode 100644
index 00000000..21096c45
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.3.1
@@ -0,0 +1,12 @@
+PassThrough -0.43805000000000188
+Glue -0.63234999999999786
+WordPenalty 0.10760731525357638
+LanguageModel 1.4702716690884872
+LanguageModel_OOV -0.45547500000000124
+PhraseModel_0 -0.34857674662928467
+PhraseModel_1 0.050651304056615561
+PhraseModel_2 0.32136542081299119
+PhraseModel_3 0.31848359353717243
+PhraseModel_4 -0.96701840673014472
+PhraseModel_5 0.026825000000000078
+PhraseModel_6 -0.30802499999999322
diff --git a/training/dtrain/examples/parallelized/work/weights.3.2 b/training/dtrain/examples/parallelized/work/weights.3.2
new file mode 100644
index 00000000..7593e794
--- /dev/null
+++ b/training/dtrain/examples/parallelized/work/weights.3.2
@@ -0,0 +1,12 @@
+PassThrough -0.53339999999998544
+Glue -0.25809999999999805
+WordPenalty 0.091646993043633926
+LanguageModel 0.77536637609898384
+LanguageModel_OOV -0.57164999999998134
+PhraseModel_0 -0.57939946953906185
+PhraseModel_1 0.46928686232236927
+PhraseModel_2 0.95470739190358411
+PhraseModel_3 0.12107346689753942
+PhraseModel_4 -1.0052552276969096
+PhraseModel_5 0.037100000000000292
+PhraseModel_6 -0.32529999999998682
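
Taken together, the work/ files reflect the updated parallelized setup: four shards instead of two, three epochs, per-shard dtrain runs logged as out.SHARD.EPOCH with weights written to weights.SHARD.EPOCH, the shard weights averaged into weights.EPOCH, and each following epoch restarted from that average ("weights in 'work/weights.N'" in the logs). A rough sketch of that outer loop, with placeholder command-line flags (the real orchestration is handled by dtrain's own tooling and is not shown in this diff):

```python
# Rough sketch of the outer loop implied by the work/ files (assumed scheme;
# the flags below are placeholders, not dtrain's actual command-line interface).
import subprocess
from collections import defaultdict

def average_weights(shard_files, out_path):
    """Per-feature average over the shard weight files; a feature missing
    from a shard contributes 0, which reproduces the weights.N values above."""
    sums = defaultdict(float)
    for path in shard_files:
        with open(path) as f:
            for line in f:
                feature, value = line.split()
                sums[feature] += float(value)
    with open(out_path, "w") as out:
        for feature, total in sums.items():
            out.write(f"{feature} {total / len(shard_files)}\n")

EPOCHS, SHARDS = 3, 4
for epoch in range(EPOCHS):
    for shard in range(SHARDS):
        cmd = ["dtrain", "-c", "cdec.ini",
               "-i", f"work/shard.{shard}.0.in",
               "-o", f"work/weights.{shard}.{epoch}"]
        if epoch > 0:  # later epochs start from the previous merged weights
            cmd += ["-w", f"work/weights.{epoch - 1}"]
        subprocess.run(cmd, check=True)
    average_weights([f"work/weights.{s}.{epoch}" for s in range(SHARDS)],
                    f"work/weights.{epoch}")
```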