author    Patrick Simianer <simianer@cl.uni-heidelberg.de>  2012-05-02 15:02:59 +0200
committer Patrick Simianer <simianer@cl.uni-heidelberg.de>  2012-05-02 15:02:59 +0200
commit    9f001a7d10ccf138861c72bb791403c19d30d084 (patch)
tree      a67358d0eb1d7dcd764303f83d206f499307bc1b
parent    3efa261afb7150f825ab21b6a1f364dc142b84fc (diff)
small improvements: sample only misranked pairs for a faster perceptron (DTRAIN_FASTER_PERCEPTRON), size gettmpf's buffer from its arguments, drop -O3 from AM_CPPFLAGS, and fix the example README and dtrain.ini
-rw-r--r--  dtrain/Makefile.am               2
-rw-r--r--  dtrain/dtrain.cc                12
-rw-r--r--  dtrain/dtrain.h                  2
-rw-r--r--  dtrain/pairsampling.h           15
-rw-r--r--  dtrain/test/example/README       4
-rw-r--r--  dtrain/test/example/dtrain.ini   2
6 files changed, 30 insertions, 7 deletions
diff --git a/dtrain/Makefile.am b/dtrain/Makefile.am
index f39d161e..64fef489 100644
--- a/dtrain/Makefile.am
+++ b/dtrain/Makefile.am
@@ -3,5 +3,5 @@ bin_PROGRAMS = dtrain
dtrain_SOURCES = dtrain.cc score.cc
dtrain_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a ../klm/lm/libklm.a ../klm/util/libklm_util.a -lz
-AM_CPPFLAGS = -O3 -W -Wall -Wno-sign-compare -I$(top_srcdir)/utils -I$(top_srcdir)/decoder -I$(top_srcdir)/mteval
+AM_CPPFLAGS = -W -Wall -Wno-sign-compare -I$(top_srcdir)/utils -I$(top_srcdir)/decoder -I$(top_srcdir)/mteval
diff --git a/dtrain/dtrain.cc b/dtrain/dtrain.cc
index 8b1fc953..864eb153 100644
--- a/dtrain/dtrain.cc
+++ b/dtrain/dtrain.cc
@@ -424,12 +424,18 @@ main(int argc, char** argv)
for (vector<pair<ScoredHyp,ScoredHyp> >::iterator it = pairs.begin();
it != pairs.end(); it++) {
+#ifdef DTRAIN_FASTER_PERCEPTRON
+ bool rank_error = true; // pair filtering already did this for us
+ rank_errors++;
+ score_t margin = 2.; // compiler, could you get rid of the margin?
+#else
bool rank_error = it->first.model <= it->second.model;
if (rank_error) rank_errors++;
score_t margin = fabs(it->first.model - it->second.model);
- if (!rank_error && margin < 1) margin_violations++;
+ if (!rank_error && margin < 1.) margin_violations++;
+#endif
if (scale_bleu_diff) eta = it->first.score - it->second.score;
- if (rank_error || (gamma && margin<1)) {
+ if (rank_error || (gamma && margin<1.)) {
SparseVector<weight_t> diff_vec = it->first.f - it->second.f;
lambdas.plus_eq_v_times_s(diff_vec, eta);
if (gamma)
@@ -534,8 +540,10 @@ main(int argc, char** argv)
cerr << _np << npairs/(float)in_sz << endl;
cerr << " avg # rank err: ";
cerr << rank_errors/(float)in_sz << endl;
+#ifndef DTRAIN_FASTER_PERCEPTRON
cerr << " avg # margin viol: ";
cerr << margin_violations/(float)in_sz << endl;
+#endif
cerr << " non0 feature count: " << nonz << endl;
cerr << " avg list sz: " << list_sz/(float)in_sz << endl;
cerr << " avg f count: " << f_count/(float)list_sz << endl;
diff --git a/dtrain/dtrain.h b/dtrain/dtrain.h
index 94d149ce..ac0345a4 100644
--- a/dtrain/dtrain.h
+++ b/dtrain/dtrain.h
@@ -32,7 +32,7 @@ inline void register_and_convert(const vector<string>& strs, vector<WordID>& ids
inline string gettmpf(const string path, const string infix)
{
- char fn[1024];
+ char fn[path.size() + infix.size() + 8];
strcpy(fn, path.c_str());
strcat(fn, "/");
strcat(fn, infix.c_str());
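
The new buffer is a variable-length array sized from the two arguments plus a small constant for the separator, suffix, and terminator; note that VLAs are a C99 feature that g++ accepts in C++ only as an extension. A hypothetical alternative that avoids raw char buffers entirely, assuming (the rest of the function lies outside this hunk) that gettmpf ultimately builds a mkstemp(3) template:

#include <cstdlib>   // mkstemp
#include <string>
#include <unistd.h>  // close
#include <vector>

// Not the committed code: a std::string-based sketch of gettmpf. The
// "-XXXXXX" suffix and the mkstemp call are assumptions about the part of
// the function the hunk does not show.
inline std::string gettmpf_sketch(const std::string& path, const std::string& infix)
{
  std::string tmpl = path + "/" + infix + "-XXXXXX";
  std::vector<char> buf(tmpl.c_str(), tmpl.c_str() + tmpl.size() + 1);  // writable copy incl. '\0'
  int fd = mkstemp(&buf[0]);  // fills in the X's and creates the file
  if (fd != -1) close(fd);    // keep only the name; the caller reopens it
  return std::string(&buf[0]);
}
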
diff --git a/dtrain/pairsampling.h b/dtrain/pairsampling.h
index bac132c6..52eeedd6 100644
--- a/dtrain/pairsampling.h
+++ b/dtrain/pairsampling.h
@@ -1,6 +1,9 @@
#ifndef _DTRAIN_PAIRSAMPLING_H_
#define _DTRAIN_PAIRSAMPLING_H_
+#define DTRAIN_FASTER_PERCEPTRON // only look at misranked pairs
+ // DO NOT USE WITH SVM!
+
namespace dtrain
{
@@ -51,6 +54,9 @@ partXYX(vector<ScoredHyp>* s, vector<pair<ScoredHyp,ScoredHyp> >& training, scor
unsigned sep = round(sz*hi_lo);
for (unsigned i = 0; i < sep; i++) {
for (unsigned j = sep; j < sz; j++) {
+#ifdef DTRAIN_FASTER_PERCEPTRON
+ if ((*s)[i].model <= (*s)[j].model) {
+#endif
if (threshold > 0) {
if (accept_pair((*s)[i].score, (*s)[j].score, threshold))
training.push_back(make_pair((*s)[i], (*s)[j]));
@@ -58,10 +64,16 @@ partXYX(vector<ScoredHyp>* s, vector<pair<ScoredHyp,ScoredHyp> >& training, scor
if ((*s)[i].score != (*s)[j].score)
training.push_back(make_pair((*s)[i], (*s)[j]));
}
+#ifdef DTRAIN_FASTER_PERCEPTRON
+ }
+#endif
}
}
for (unsigned i = sep; i < sz-sep; i++) {
for (unsigned j = sz-sep; j < sz; j++) {
+#ifdef DTRAIN_FASTER_PERCEPTRON
+ if ((*s)[i].model <= (*s)[j].model) {
+#endif
if (threshold > 0) {
if (accept_pair((*s)[i].score, (*s)[j].score, threshold))
training.push_back(make_pair((*s)[i], (*s)[j]));
@@ -69,6 +81,9 @@ partXYX(vector<ScoredHyp>* s, vector<pair<ScoredHyp,ScoredHyp> >& training, scor
if ((*s)[i].score != (*s)[j].score)
training.push_back(make_pair((*s)[i], (*s)[j]));
}
+#ifdef DTRAIN_FASTER_PERCEPTRON
+ }
+#endif
}
}
}
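
The new guard keeps a pair only when the hypothesis that is better under the gold metric is not already scored higher by the model, i.e. only (potential) rank errors reach the update loop in dtrain.cc. That is presumably also why the define warns against SVM use: with a margin objective (gamma > 0), correctly ranked pairs that fall inside the margin still matter, but they are never sampled here. A tiny illustration of the predicate, with illustrative names rather than dtrain's:

struct Scored { double model; double gold; };

// Mirrors the guard added under DTRAIN_FASTER_PERCEPTRON:
// (*s)[i].model <= (*s)[j].model, with i the better and j the worse
// hypothesis under the gold metric.
inline bool is_misranked(const Scored& better_by_gold, const Scored& worse_by_gold)
{
  return better_by_gold.model <= worse_by_gold.model;
}
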
diff --git a/dtrain/test/example/README b/dtrain/test/example/README
index b3ea5f06..6937b11b 100644
--- a/dtrain/test/example/README
+++ b/dtrain/test/example/README
@@ -1,8 +1,8 @@
Small example of input format for distributed training.
Call dtrain from cdec/dtrain/ with ./dtrain -c test/example/dtrain.ini .
-For this to work, disable '#define DTRAIN_LOCAL' from dtrain.h
+For this to work, undef 'DTRAIN_LOCAL' in dtrain.h
and recompile.
-Data is here: http://simianer.de/dtrain
+Data is here: http://simianer.de/#dtrain
diff --git a/dtrain/test/example/dtrain.ini b/dtrain/test/example/dtrain.ini
index f87ee9cf..e43d6b34 100644
--- a/dtrain/test/example/dtrain.ini
+++ b/dtrain/test/example/dtrain.ini
@@ -5,7 +5,7 @@ decoder_config=test/example/cdec.ini # config for cdec
# weights for these features will be printed on each iteration
print_weights=Glue WordPenalty LanguageModel LanguageModel_OOV PhraseModel_0 PhraseModel_1 PhraseModel_2 PhraseModel_3 PhraseModel_4 PhraseModel_5 PhraseModel_6 PassThrough
tmp=/tmp
-stop_after=10 # stop epoch after 20 inputs
+stop_after=20 # stop epoch after 20 inputs
# interesting stuff
epochs=3 # run over input 3 times