From f09e46d9f15cbd33801d74058bcd7fd780daa047 Mon Sep 17 00:00:00 2001
From: redpony
Date: Sat, 23 Oct 2010 14:33:21 +0000
Subject: train without mpi

git-svn-id: https://ws10smt.googlecode.com/svn/trunk@690 ec762483-ff6d-05da-a07a-a48fb63a330f
---
 training/Makefile.am            | 11 +++--------
 training/mpi_online_optimize.cc | 25 +++++++++++++++++++++++--
 training/online_train.cc        |  8 --------
 3 files changed, 26 insertions(+), 18 deletions(-)
 delete mode 100644 training/online_train.cc

diff --git a/training/Makefile.am b/training/Makefile.am
index b3f93529..89d4a4c9 100644
--- a/training/Makefile.am
+++ b/training/Makefile.am
@@ -9,7 +9,7 @@ bin_PROGRAMS = \
   plftools \
   collapse_weights \
   cllh_filter_grammar \
-  online_train
+  mpi_online_optimize
 
 noinst_PROGRAMS = \
   lbfgs_test \
@@ -17,18 +17,16 @@ noinst_PROGRAMS = \
 
 TESTS = lbfgs_test optimize_test
 
+mpi_online_optimize_SOURCES = mpi_online_optimize.cc online_optimizer.cc
+mpi_online_optimize_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a -lz
 if MPI
 bin_PROGRAMS += mpi_batch_optimize \
-                mpi_online_optimize \
                 compute_cllh
 
 mpi_batch_optimize_SOURCES = mpi_batch_optimize.cc optimize.cc
 mpi_batch_optimize_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a -lz
 
-mpi_online_optimize_SOURCES = mpi_online_optimize.cc online_optimizer.cc
-mpi_online_optimize_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a -lz
-
 compute_cllh_SOURCES = compute_cllh.cc
 compute_cllh_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a -lz
 endif
@@ -36,9 +34,6 @@ endif
 cllh_filter_grammar_SOURCES = cllh_filter_grammar.cc
 cllh_filter_grammar_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/mteval/libmteval.a $(top_srcdir)/utils/libutils.a -lz
 
-online_train_SOURCES = online_train.cc online_optimizer.cc
-online_train_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/utils/libutils.a -lz
-
 atools_SOURCES = atools.cc
 atools_LDADD = $(top_srcdir)/decoder/libcdec.a $(top_srcdir)/utils/libutils.a -lz
 
diff --git a/training/mpi_online_optimize.cc b/training/mpi_online_optimize.cc
index 96f3bcfb..0f994c59 100644
--- a/training/mpi_online_optimize.cc
+++ b/training/mpi_online_optimize.cc
@@ -6,10 +6,12 @@
 #include
 #include
 
-#include <boost/mpi/timer.hpp>
-#include <boost/mpi.hpp>
 #include
 #include
+#ifdef HAVE_MPI
+#include <boost/mpi/timer.hpp>
+#include <boost/mpi.hpp>
+#endif
 
 #include "verbose.h"
 #include "hg.h"
@@ -194,6 +196,7 @@ struct TrainingObserver : public DecoderObserver {
   int state;
 };
 
+#ifdef HAVE_MPI
 namespace mpi = boost::mpi;
 
 namespace boost { namespace mpi {
@@ -201,6 +204,7 @@ namespace boost { namespace mpi {
   struct is_commutative<std::plus<SparseVector<double> >, SparseVector<double> >
     : mpl::true_ { };
 } } // end namespace boost::mpi
+#endif
 
 bool LoadAgenda(const string& file, vector<pair<string, int> >* a) {
   ReadFile rf(file);
@@ -229,10 +233,15 @@ bool LoadAgenda(const string& file, vector<pair<string, int> >* a) {
 }
 
 int main(int argc, char** argv) {
+#ifdef HAVE_MPI
   mpi::environment env(argc, argv);
   mpi::communicator world;
   const int size = world.size();
   const int rank = world.rank();
+#else
+  const int size = 1;
+  const int rank = 0;
+#endif
   if (size > 1) SetSilent(true);  // turn off verbose decoder output
   register_feature_functions();
   std::tr1::shared_ptr<MT19937> rng;
@@ -261,7 +270,11 @@ int main(int argc, char** argv) {
   }
 
   size_t total_corpus_size = 0;
+#ifdef HAVE_MPI
   reduce(world, corpus.size(), total_corpus_size, std::plus<size_t>(), 0);
+#else
+  total_corpus_size = corpus.size();
+#endif
 
   if (rank == 0) {
     cerr << "Total corpus size: " << total_corpus_size << endl;
@@ -311,7 +324,9 @@ int main(int argc, char** argv) {
     int iter = -1;
     bool converged = false;
     while (!converged) {
+#ifdef HAVE_MPI
       mpi::timer timer;
+#endif
       weights.InitFromVector(x);
       weights.InitVector(&lambdas);
       ++iter; ++titer;
@@ -342,16 +357,22 @@ int main(int argc, char** argv) {
       }
       SparseVector<double> local_grad, g;
       observer.GetGradient(&local_grad);
+#ifdef HAVE_MPI
       reduce(world, local_grad, g, std::plus<SparseVector<double> >(), 0);
+#else
+      g.swap(local_grad);
+#endif
       local_grad.clear();
       if (rank == 0) {
         g /= (size_per_proc * size);
         o->UpdateWeights(g, FD::NumFeats(), &x);
       }
+#ifdef HAVE_MPI
       broadcast(world, x, 0);
       broadcast(world, converged, 0);
       world.barrier();
       if (rank == 0) { cerr << " ELAPSED TIME THIS ITERATION=" << timer.elapsed() << endl; }
+#endif
     }
   }
   return 0;
diff --git a/training/online_train.cc b/training/online_train.cc
deleted file mode 100644
index 2e906913..00000000
--- a/training/online_train.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-#include
-
-#include "online_optimizer.h"
-
-int main(int argc, char** argv) {
-  return 0;
-}
-
--
cgit v1.2.3
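
The pattern the patch applies is worth spelling out: when HAVE_MPI is not defined, the trainer behaves as a one-process job (rank 0, size 1) and each MPI collective is replaced by its trivial single-process equivalent. Below is a minimal stand-alone sketch of that pattern; the HAVE_MPI macro name matches the patch, but the variable names and the example reduction are illustrative only and are not cdec code.

// Sketch of the single-source MPI / no-MPI pattern used by the patch.
// Build with -DHAVE_MPI and link Boost.MPI for the parallel binary, or
// build without the define for a plain single-process binary.
#include <functional>
#include <iostream>

#ifdef HAVE_MPI
#include <boost/mpi.hpp>
namespace mpi = boost::mpi;
#endif

int main(int argc, char** argv) {
#ifdef HAVE_MPI
  mpi::environment env(argc, argv);
  mpi::communicator world;
  const int size = world.size();
  const int rank = world.rank();
#else
  const int size = 1;  // a single process stands in for the whole job
  const int rank = 0;
#endif

  double local_stat = rank + 1.0;  // some per-process quantity (illustrative)
  double total = 0.0;
#ifdef HAVE_MPI
  // Combine the per-process values on rank 0, as the patch does for the
  // corpus size and the gradient.
  reduce(world, local_stat, total, std::plus<double>(), 0);
#else
  // With one process, the "reduction" is just the local value.
  total = local_stat;
#endif

  if (rank == 0)
    std::cerr << "total over " << size << " process(es): " << total << std::endl;
  return 0;
}

For the gradient itself the patch goes one step further and swaps the buffer in place (g.swap(local_grad)) rather than copying it, so the single-process build avoids an extra copy of the feature vector.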