Diffstat (limited to 'training')
-rw-r--r--  training/mpi_batch_optimize.cc | 3 +--
-rw-r--r--  training/mr_optimize_reduce.cc | 3 +--
-rw-r--r--  training/optimize_test.cc      | 2 +-
3 files changed, 3 insertions, 5 deletions
diff --git a/training/mpi_batch_optimize.cc b/training/mpi_batch_optimize.cc
index 046e921c..9f12dba9 100644
--- a/training/mpi_batch_optimize.cc
+++ b/training/mpi_batch_optimize.cc
@@ -29,7 +29,6 @@ namespace mpi = boost::mpi;
 #include "sparse_vector.h"
 
 using namespace std;
-using boost::shared_ptr;
 namespace po = boost::program_options;
 
 bool InitCommandLine(int argc, char** argv, po::variables_map* conf) {
@@ -270,7 +269,7 @@ int main(int argc, char** argv) {
     }
     Weights::InitFromFile(conf["means"].as<string>(), &means);
   }
-  shared_ptr<BatchOptimizer> o;
+  boost::shared_ptr<BatchOptimizer> o;
   if (rank == 0) {
     const string omethod = conf["optimization_method"].as<string>();
     if (omethod == "rprop")
diff --git a/training/mr_optimize_reduce.cc b/training/mr_optimize_reduce.cc
index 15e28fa1..461e6b5f 100644
--- a/training/mr_optimize_reduce.cc
+++ b/training/mr_optimize_reduce.cc
@@ -15,7 +15,6 @@
 #include "sparse_vector.h"
 
 using namespace std;
-using boost::shared_ptr;
 namespace po = boost::program_options;
 
 void SanityCheck(const vector<double>& w) {
@@ -102,7 +101,7 @@ int main(int argc, char** argv) {
     }
     Weights::InitFromFile(conf["means"].as<string>(), &means);
   }
-  shared_ptr<BatchOptimizer> o;
+  boost::shared_ptr<BatchOptimizer> o;
   const string omethod = conf["optimization_method"].as<string>();
   if (omethod == "rprop")
     o.reset(new RPropOptimizer(num_feats));  // TODO add configuration
diff --git a/training/optimize_test.cc b/training/optimize_test.cc
index fe7ca70f..bff2ca03 100644
--- a/training/optimize_test.cc
+++ b/training/optimize_test.cc
@@ -102,7 +102,7 @@ void TestOnline() {
   size_t N = 20;
   double C = 1.0;
   double eta0 = 0.2;
-  shared_ptr<LearningRateSchedule> r(new ExponentialDecayLearningRate(N, eta0, 0.85));
+  std::tr1::shared_ptr<LearningRateSchedule> r(new ExponentialDecayLearningRate(N, eta0, 0.85));
   //shared_ptr<LearningRateSchedule> r(new StandardLearningRate(N, eta0));
   CumulativeL1OnlineOptimizer opt(r, N, C, std::vector<int>());
   assert(r->eta(10) < r->eta(1));
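
The change is the same in all three files: the unqualified name shared_ptr is replaced by a fully qualified one and the "using boost::shared_ptr;" declarations are dropped. The commit message gives no rationale here, but the likely reason is that under a C++0x/TR1 toolchain headers such as <memory> put a second shared_ptr (std::shared_ptr or std::tr1::shared_ptr) in scope, and together with "using namespace std;" the unqualified name becomes ambiguous. A minimal sketch of the ambiguity, assuming a C++0x compiler and Boost; the Widget type and file name are hypothetical, not from the repository:

// Sketch, not repository code. Compile as e.g.: g++ -std=c++0x ambiguity.cc
#include <memory>                  // in C++0x mode this declares std::shared_ptr
#include <boost/shared_ptr.hpp>    // declares boost::shared_ptr

using namespace std;               // as in mpi_batch_optimize.cc
//using boost::shared_ptr;         // the declaration this commit removes

struct Widget {};                  // hypothetical placeholder type

int main() {
  // If the using-declaration above is restored, unqualified lookup finds
  // both boost::shared_ptr and std::shared_ptr in the same scope and the
  // compiler rejects it:
  //shared_ptr<Widget> a(new Widget);       // error: 'shared_ptr' is ambiguous
  boost::shared_ptr<Widget> b(new Widget);  // fully qualified: unambiguous
  return 0;
}

Note that optimize_test.cc qualifies the name as std::tr1::shared_ptr rather than boost::shared_ptr, presumably because the pointer is handed to CumulativeL1OnlineOptimizer, whose interface expects the TR1 type.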