#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

#include <boost/filesystem.hpp>
#include <boost/program_options.hpp>
#include <boost/program_options/variables_map.hpp>

#include "alignment.h"
#include "data_array.h"
#include "features/count_source_target.h"
#include "features/feature.h"
#include "features/is_source_singleton.h"
#include "features/is_source_target_singleton.h"
#include "features/max_lex_source_given_target.h"
#include "features/max_lex_target_given_source.h"
#include "features/sample_source_count.h"
#include "features/target_given_source_coherent.h"
#include "grammar.h"
#include "grammar_extractor.h"
#include "precomputation.h"
#include "rule.h"
#include "scorer.h"
#include "suffix_array.h"
#include "time_util.h"
#include "translation_table.h"

namespace fs = boost::filesystem;
namespace po = boost::program_options;
using namespace std;

// Debug helper (unused in main): spins while spamming stderr so the process
// can be inspected (e.g. attached to with a debugger) before continuing.
void my_pause() {
  cerr << "pausing..." << endl;
  for (int i = 0; i < 10000000; ++i) {
    cerr << endl;
  }
  cerr << "end pause" << endl;
}

int main(int argc, char** argv) {
  // TODO(pauldb): Also take arguments from config file.
  po::options_description desc("Command line options");
  desc.add_options()
    ("help,h", "Show available options")
    ("source,f", po::value<string>(), "Source language corpus")
    ("target,e", po::value<string>(), "Target language corpus")
    ("bitext,b", po::value<string>(), "Parallel text (source ||| target)")
    ("alignment,a", po::value<string>()->required(), "Bitext word alignment")
    ("grammars,g", po::value<string>()->required(), "Grammars output path")
    ("frequent", po::value<int>()->default_value(100),
        "Number of precomputed frequent patterns")
    ("super_frequent", po::value<int>()->default_value(10),
        "Number of precomputed super frequent patterns")
    ("max_rule_span", po::value<int>()->default_value(15),
        "Maximum rule span")
    ("max_rule_symbols,l", po::value<int>()->default_value(5),
        "Maximum number of symbols (terminals + nonterminals) in a rule")
    ("min_gap_size", po::value<int>()->default_value(1),
        "Minimum gap size")
    ("max_phrase_len", po::value<int>()->default_value(4),
        "Maximum frequent phrase length")
    ("max_nonterminals", po::value<int>()->default_value(2),
        "Maximum number of nonterminals in a rule")
    ("min_frequency", po::value<int>()->default_value(1000),
        "Minimum number of occurrences for a phrase to be considered frequent")
    ("max_samples", po::value<int>()->default_value(300),
        "Maximum number of samples")
    // TODO(pauldb): Check if this works when set to false.
    ("tight_phrases", po::value<bool>()->default_value(true),
        "False if phrases may be loose (better, but slower)");

  po::variables_map vm;
  po::store(po::parse_command_line(argc, argv, desc), vm);

  // Check for help argument before notify, so we don't need to pass in the
  // required parameters.
  if (vm.count("help")) {
    cout << desc << endl;
    return 0;
  }

  po::notify(vm);

  if (!((vm.count("source") && vm.count("target")) || vm.count("bitext"))) {
    cerr << "A parallel corpus is required. "
         << "Use -f (source) with -e (target) or -b (bitext)."
         << endl;
    return 1;
  }

  Clock::time_point preprocess_start_time = Clock::now();
  cerr << "Reading source and target data..." << endl;
  Clock::time_point start_time = Clock::now();
  shared_ptr<DataArray> source_data_array, target_data_array;
  if (vm.count("bitext")) {
    source_data_array = make_shared<DataArray>(
        vm["bitext"].as<string>(), SOURCE);
    target_data_array = make_shared<DataArray>(
        vm["bitext"].as<string>(), TARGET);
  } else {
    source_data_array = make_shared<DataArray>(vm["source"].as<string>());
    target_data_array = make_shared<DataArray>(vm["target"].as<string>());
  }
  Clock::time_point stop_time = Clock::now();
  cerr << "Reading data took " << GetDuration(start_time, stop_time)
       << " seconds" << endl;

  cerr << "Creating source suffix array..." << endl;
  start_time = Clock::now();
  shared_ptr<SuffixArray> source_suffix_array =
      make_shared<SuffixArray>(source_data_array);
  stop_time = Clock::now();
  cerr << "Creating suffix array took "
       << GetDuration(start_time, stop_time) << " seconds" << endl;

  cerr << "Reading alignment..." << endl;
  start_time = Clock::now();
  shared_ptr<Alignment> alignment =
      make_shared<Alignment>(vm["alignment"].as<string>());
  stop_time = Clock::now();
  cerr << "Reading alignment took "
       << GetDuration(start_time, stop_time) << " seconds" << endl;

  cerr << "Precomputing collocations..." << endl;
  start_time = Clock::now();
  shared_ptr<Precomputation> precomputation = make_shared<Precomputation>(
      source_suffix_array,
      vm["frequent"].as<int>(),
      vm["super_frequent"].as<int>(),
      vm["max_rule_span"].as<int>(),
      vm["max_rule_symbols"].as<int>(),
      vm["min_gap_size"].as<int>(),
      vm["max_phrase_len"].as<int>(),
      vm["min_frequency"].as<int>());
  stop_time = Clock::now();
  cerr << "Precomputing collocations took "
       << GetDuration(start_time, stop_time) << " seconds" << endl;

  cerr << "Precomputing conditional probabilities..." << endl;
  start_time = Clock::now();
  shared_ptr<TranslationTable> table = make_shared<TranslationTable>(
      source_data_array, target_data_array, alignment);
  stop_time = Clock::now();
  cerr << "Precomputing conditional probabilities took "
       << GetDuration(start_time, stop_time) << " seconds" << endl;

  Clock::time_point preprocess_stop_time = Clock::now();
  cerr << "Overall preprocessing step took "
       << GetDuration(preprocess_start_time, preprocess_stop_time)
       << " seconds" << endl;

  Clock::time_point extraction_start_time = Clock::now();
  vector<shared_ptr<Feature>> features = {
      make_shared<TargetGivenSourceCoherent>(),
      make_shared<SampleSourceCount>(),
      make_shared<CountSourceTarget>(),
      make_shared<MaxLexSourceGivenTarget>(table),
      make_shared<MaxLexTargetGivenSource>(table),
      make_shared<IsSourceSingleton>(),
      make_shared<IsSourceTargetSingleton>()
  };
  shared_ptr<Scorer> scorer = make_shared<Scorer>(features);

  // TODO(pauldb): Add parallelization.
  GrammarExtractor extractor(
      source_suffix_array,
      target_data_array,
      alignment,
      precomputation,
      scorer,
      vm["min_gap_size"].as<int>(),
      vm["max_rule_span"].as<int>(),
      vm["max_nonterminals"].as<int>(),
      vm["max_rule_symbols"].as<int>(),
      vm["max_samples"].as<int>(),
      vm["tight_phrases"].as<bool>());

  int grammar_id = 0;
  fs::path grammar_path = vm["grammars"].as<string>();
  if (!fs::is_directory(grammar_path)) {
    fs::create_directory(grammar_path);
  }

  string sentence, delimiter = "|||";
  while (getline(cin, sentence)) {
    // Split off an optional " ||| ..." suffix (e.g. reference translations);
    // it is echoed back verbatim after the sentence.
    string suffix = "";
    size_t position = sentence.find(delimiter);
    if (position != string::npos) {
      suffix = sentence.substr(position);
      sentence = sentence.substr(0, position);
    }

    Grammar grammar = extractor.GetGrammar(sentence);
    string file_name = "grammar." + to_string(grammar_id);
    fs::path grammar_file = grammar_path / file_name;
    ofstream output(grammar_file.c_str());
    output << grammar;

    // Wrap the sentence in a <seg> tag pointing at its per-sentence grammar.
    cout << "<seg grammar=\"" << grammar_file << "\" id=\"" << grammar_id
         << "\"> " << sentence << " </seg> " << suffix << endl;
    ++grammar_id;
  }

  Clock::time_point extraction_stop_time = Clock::now();
  cerr << "Overall extraction step took "
       << GetDuration(extraction_start_time, extraction_stop_time)
       << " seconds" << endl;

  return 0;
}
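
// Example invocation (a sketch; the binary name and the corpus, alignment,
// and output names below are hypothetical). The extractor reads test
// sentences from stdin, writes one grammar file per sentence under the
// --grammars directory, and echoes each sentence to stdout wrapped in a
// <seg> tag that references its grammar file:
//
//   ./run_extractor -b training.bitext -a training.align -g grammars \
//       < dev.src > dev.sgm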