author | Patrick Simianer <p@simianer.de> | 2016-04-08 23:15:09 +0200
committer | Patrick Simianer <p@simianer.de> | 2016-04-08 23:15:09 +0200
commit | 7e583a0880347caf4e9ec84c2c801d1b280cffdd (patch)
tree | fe83a3123182b72de1f15bfade4eb0c67e21f637 /training/dtrain
parent | 176f311f6b4b2048dd05e0304d66ae5c61a4506e (diff)
dtrain: fixes
Diffstat (limited to 'training/dtrain')
-rw-r--r-- | training/dtrain/dtrain.cc | 4
-rw-r--r-- | training/dtrain/dtrain.h | 2
2 files changed, 3 insertions, 3 deletions
diff --git a/training/dtrain/dtrain.cc b/training/dtrain/dtrain.cc
index 53e8cd50..b488e661 100644
--- a/training/dtrain/dtrain.cc
+++ b/training/dtrain/dtrain.cc
@@ -173,10 +173,10 @@ main(int argc, char** argv)
   SparseVector<weight_t> gradient_accum, update_accum;
   if (use_adadelta && adadelta_input!="") {
     vector<weight_t> grads_tmp;
-    Weights::InitFromFile(adadelta_input+".gradient", &grads_tmp);
+    Weights::InitFromFile(adadelta_input+".gradient.gz", &grads_tmp);
     Weights::InitSparseVector(grads_tmp, &gradient_accum);
     vector<weight_t> update_tmp;
-    Weights::InitFromFile(adadelta_input+".update", &update_tmp);
+    Weights::InitFromFile(adadelta_input+".update.gz", &update_tmp);
     Weights::InitSparseVector(update_tmp, &update_accum);
   }
diff --git a/training/dtrain/dtrain.h b/training/dtrain/dtrain.h
index ce5b2101..883e6028 100644
--- a/training/dtrain/dtrain.h
+++ b/training/dtrain/dtrain.h
@@ -68,7 +68,7 @@ dtrain_init(int argc,
     ("margin,m", po::value<weight_t>()->default_value(1.0),
      "margin for margin perceptron [set =0 for standard perceptron]")
     ("cut,u", po::value<weight_t>()->default_value(0.1),
-     "use top/bottom 10% (default) of k-best as 'good' and 'bad' for pair sampling, 0 to use all pairs TODO")
+     "use top/bottom 10% (default) of k-best as 'good' and 'bad' for pair sampling, 0 to use all pairs")
     ("adjust,A", po::bool_switch()->default_value(false),
      "adjust cut for optimal pos. in k-best to cut")
     ("score,s", po::value<string>()->default_value("nakov"),
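The dtrain.cc hunk only changes where the resumed AdaDelta state is read from: the per-feature gradient and update accumulators are now loaded from `.gradient.gz` and `.update.gz`, presumably because the trainer writes these files gzipped. For context, below is a minimal sketch of the kind of AdaDelta step such accumulators feed into. The decay rate `rho`, the smoothing term `epsilon`, and the map-based sparse vectors are illustrative assumptions, not the actual dtrain implementation (which uses `SparseVector<weight_t>`).

```cpp
// Illustrative AdaDelta step over sparse features (not dtrain's code).
// gradient_accum and update_accum correspond in spirit to the two
// accumulators loaded in the hunk above.
#include <cmath>
#include <map>
#include <string>

typedef double weight_t;
typedef std::map<std::string, weight_t> FeatVec;

void
adadelta_step(FeatVec& weights,
              const FeatVec& gradient,
              FeatVec& gradient_accum,  // running average of squared gradients
              FeatVec& update_accum,    // running average of squared updates
              weight_t rho = 0.95,      // assumed decay rate
              weight_t epsilon = 1e-6)  // assumed smoothing term
{
  for (FeatVec::const_iterator it = gradient.begin(); it != gradient.end(); ++it) {
    const std::string& f = it->first;
    const weight_t g = it->second;
    // decaying accumulator of squared gradients: E[g^2]
    gradient_accum[f] = rho * gradient_accum[f] + (1.0 - rho) * g * g;
    // per-feature step, scaled by the ratio of the two RMS values
    const weight_t dx = -std::sqrt((update_accum[f] + epsilon) /
                                   (gradient_accum[f] + epsilon)) * g;
    // decaying accumulator of squared updates: E[dx^2]
    update_accum[f] = rho * update_accum[f] + (1.0 - rho) * dx * dx;
    weights[f] += dx;
  }
}
```

Resuming training then amounts to initializing the two accumulators from the saved files before the first step, which is what the hunk does via `Weights::InitFromFile` and `Weights::InitSparseVector`.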