# dtrain/test/example/dtrain.ini
input=test/example/nc-wmt11.1k.gz    # use '-' for stdin
output=weights.gz                    # output weights file (use a .gz suffix for gzip compression), or '-' for STDOUT
decoder_config=test/example/cdec.ini # config for cdec
# weights for these features will be printed on each iteration
print_weights=Glue WordPenalty LanguageModel LanguageModel_OOV PhraseModel_0 PhraseModel_1 PhraseModel_2 PhraseModel_3 PhraseModel_4 PhraseModel_5 PhraseModel_6 PassThrough
tmp=/tmp
stop_after=100 # stop each epoch after 100 inputs

# interesting stuff
epochs=100              # run over the input 100 times
k=100                   # use 100-best lists
N=4                     # optimize (approx.) BLEU-4
learning_rate=0.0001    # learning rate
gamma=0                 # SVM-style regularization strength (0 = no regularization)
scorer=smooth_bleu      # use smooth BLEU (Liang et al. '06)
sample_from=kbest       # use k-best lists (as opposed to the full forest)
filter=uniq             # keep only unique k-best entries (by surface form)
pair_sampling=108010    # sample pairs across a top 10% / middle 80% / bottom 10% partition of the k-best list
pair_threshold=0        # minimum distance in BLEU (this will still only use pairs with diff > 0)
select_weights=last     # just output last weights
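
# Usage sketch (invocation below is assumed, not part of this file):
# dtrain is typically started with this file as its config, e.g.
#   dtrain -c test/example/dtrain.ini
# It then decodes each input sentence with cdec (decoder_config), scores the
# k-best list with the configured scorer, samples hypothesis pairs as set
# above, and writes the learned weights to 'output'.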