#ifndef _TROMBLE_LOSS_H_
#define _TROMBLE_LOSS_H_
#include <vector>
#include <boost/scoped_ptr.hpp>
#include <boost/utility/base_from_member.hpp>
#include "ff.h"
#include "wordid.h"
// this may not be the most elegant way to implement this computation, but since we
// may need cube pruning and state splitting, we reuse the feature detector framework.
// the loss is then stored in feature #0 (which is guaranteed to have weight 0 and
// never be a "real" feature).
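//
// A sketch of how downstream code could read the loss back out of a filled-in
// feature vector (hypothetical call site; assumes SparseVector<double> exposes
// a value(fid) accessor, which is an assumption rather than a documented fact):
//   const double loss = feature_vector.value(0);  // feature #0 carries the loss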
class TrombleLossComputerImpl;
class TrombleLossComputer : private boost::base_from_member<boost::scoped_ptr<TrombleLossComputerImpl> >, public FeatureFunction {
 private:
  typedef boost::scoped_ptr<TrombleLossComputerImpl> PImpl;
  typedef FeatureFunction Base;

 public:
  // String parameters are: ref.txt num_ref weight1 weight2 ... weightn
  // where ref.txt contains references, one per line, with num_ref references per sentence.
  // The weights are the weights on each n-gram length.
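  //
  // A hypothetical configuration sketch (the file name, reference count, and
  // weights below are invented for illustration, not taken from any config):
  //   TrombleLossComputer loss("refs.txt 2 0.1 0.2 0.3 0.4");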
  explicit TrombleLossComputer(const std::string &params);

  ~TrombleLossComputer();

 protected:
  virtual void TraversalFeaturesImpl(const SentenceMetadata& smeta,
                                     const Hypergraph::Edge& edge,
                                     const std::vector<const void*>& ant_contexts,
                                     SparseVector<double>* features,
                                     SparseVector<double>* estimated_features,
                                     void* out_context) const;

 private:
  const int fid_;
};
#endif