From 7e583a0880347caf4e9ec84c2c801d1b280cffdd Mon Sep 17 00:00:00 2001
From: Patrick Simianer
Date: Fri, 8 Apr 2016 23:15:09 +0200
Subject: dtrain: fixes
---
training/dtrain/dtrain.cc | 4 ++--
training/dtrain/dtrain.h | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/training/dtrain/dtrain.cc b/training/dtrain/dtrain.cc
index 53e8cd50..b488e661 100644
--- a/training/dtrain/dtrain.cc
+++ b/training/dtrain/dtrain.cc
@@ -173,10 +173,10 @@ main(int argc, char** argv)
SparseVector<weight_t> gradient_accum, update_accum;
if (use_adadelta && adadelta_input!="") {
vector<weight_t> grads_tmp;
- Weights::InitFromFile(adadelta_input+".gradient", &grads_tmp);
+ Weights::InitFromFile(adadelta_input+".gradient.gz", &grads_tmp);
Weights::InitSparseVector(grads_tmp, &gradient_accum);
vector<weight_t> update_tmp;
- Weights::InitFromFile(adadelta_input+".update", &update_tmp);
+ Weights::InitFromFile(adadelta_input+".update.gz", &update_tmp);
Weights::InitSparseVector(update_tmp, &update_accum);
}
diff --git a/training/dtrain/dtrain.h b/training/dtrain/dtrain.h
index ce5b2101..883e6028 100644
--- a/training/dtrain/dtrain.h
+++ b/training/dtrain/dtrain.h
@@ -68,7 +68,7 @@ dtrain_init(int argc,
("margin,m", po::value()->default_value(1.0),
"margin for margin perceptron [set =0 for standard perceptron]")
("cut,u", po::value()->default_value(0.1),
- "use top/bottom 10% (default) of k-best as 'good' and 'bad' for pair sampling, 0 to use all pairs TODO")
+ "use top/bottom 10% (default) of k-best as 'good' and 'bad' for pair sampling, 0 to use all pairs")
("adjust,A", po::bool_switch()->default_value(false),
"adjust cut for optimal pos. in k-best to cut")
("score,s", po::value()->default_value("nakov"),
--
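Note (not part of the patch): after this change dtrain restores its AdaDelta accumulators from the gzipped files adadelta_input+".gradient.gz" and adadelta_input+".update.gz" via Weights::InitFromFile. The standalone sketch below is only an illustration of reading such a gzip-compressed weights file with zlib; it assumes one whitespace-separated "feature value" pair per line, and the helper name LoadGzWeights is made up for this sketch rather than taken from cdec's actual loading code.

/*
 * Standalone sketch, not cdec code: read a gzip-compressed weights file
 * such as the adadelta_input+".gradient.gz" the patch points dtrain at.
 * Assumes one "feature value" pair per line (an assumed format; the real
 * loading is done by cdec's Weights::InitFromFile).
 *
 * build: g++ -O2 load_gz_weights.cc -lz
 */
#include <zlib.h>

#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Hypothetical helper, named for this sketch only.
std::map<std::string, double>
LoadGzWeights(const std::string& path)
{
  std::map<std::string, double> weights;
  gzFile in = gzopen(path.c_str(), "rb");  // zlib also reads uncompressed files transparently
  if (!in) {
    std::cerr << "cannot open " << path << std::endl;
    return weights;
  }
  char buf[4096];
  while (gzgets(in, buf, (int)sizeof(buf)) != NULL) {
    std::istringstream line(buf);
    std::string feature;
    double value;
    if (line >> feature >> value)  // skip blank or malformed lines
      weights[feature] = value;
  }
  gzclose(in);
  return weights;
}

int main(int argc, char** argv)
{
  if (argc < 2) {
    std::cerr << "usage: " << argv[0] << " weights.gz" << std::endl;
    return 1;
  }
  std::map<std::string, double> w = LoadGzWeights(argv[1]);
  std::cerr << "read " << w.size() << " features" << std::endl;
  return 0;
}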