CPpa.h
/* This file contains classes for representing Gaussian processes using the
   variational method of probabilistic point assimilation (PPA). The model
   relies on the CKern class and the CNoise class to define the kernel and
   the noise model. */
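/* A minimal usage sketch (illustration only, not part of this header).
   The kernel and noise classes named below, CRbfKern and CGaussianNoise,
   are assumed to be supplied by CKern.h and CNoise.h; substitute whichever
   kernel and noise classes the accompanying library actually provides and
   check their constructor signatures against those headers.

     CMatrix X(numData, inputDim);   // training inputs, one row per point
     CMatrix y(numData, numTarget);  // training targets
     CRbfKern kern(X);               // covariance function over the inputs
     CGaussianNoise noise(&y);       // noise model attached to the targets
     CPpa model(X, y, kern, noise);  // build the PPA approximation
     model.optimise();               // alternate E-M with kernel/noise updates
     CMatrix mu(numTest, numTarget);
     CMatrix varSigma(numTest, numTarget);
     model.posteriorMeanVar(mu, varSigma, Xtest); // predictive mean and variance
*/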
#ifndef CPPA_H
#define CPPA_H
#include "CDataModel.h"
using namespace std;
const double NULOW=1e-16;
const string PPAVERSION="0.1";
class CPpa : public COptimisableKernelModel {
public:
// Constructor given a filename.
CPpa(const string modelFileName, const int verbos=2);
// Constructor given a kernel and a noise model.
CPpa(const CMatrix& inData, const CMatrix& targetData,
CKern& kernel, CNoise& noiseModel, const int verbos=2);
CPpa(const CMatrix& trX, const CMatrix& trY,
const CMatrix& mmat, const CMatrix& betamat, CKern& kernel,
CNoise& noiseModel,
const int verbos=2);
#ifdef _NDLMATLAB
// Constructor using file containing ppaInfo.
CPpa(const CMatrix& inData,
const CMatrix& targetData,
CKern& kernel,
CNoise& noiseModel,
const string ppaInfoFile,
const string ppaInfoVariable,
const int verbos=2);
#endif
// Initialise the storage for the model.
void initStoreage();
// Set the initial values for the model.
void initVals();
// Update the site parameters at the given index.
void updateSite(const int index);
// Run the expectation step in the E-M algorithm.
void eStep();
// Run the maximisation step in the E-M algorithm.
void mStep();
// Update expectations of f.
void updateExpectationf();
// Update expectations of f^2
void updateExpectationff();
// Update expectations of fBar and fBarfBar
void updateExpectationsfBar();
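// Evaluate the model on a test set.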
void test(const CMatrix& ytest, const CMatrix& Xin) const;
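// Compute pointwise predictive likelihoods of yTest at the inputs Xin, returned in pout.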
void likelihoods(CMatrix& pout, CMatrix& yTest, const CMatrix& Xin) const;
// log likelihood of training set.
double logLikelihood() const;
// log likelihood of test set.
double logLikelihood(const CMatrix& yTest, const CMatrix& Xin) const;
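// Compute output predictions at the given inputs (the second form also returns predictive probabilities).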
void out(CMatrix& yPred, const CMatrix& inData) const;
void out(CMatrix& yPred, CMatrix& probPred, const CMatrix& inData) const;
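// Compute the posterior mean and variance at the given input points.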
void posteriorMeanVar(CMatrix& mu, CMatrix& varSigma, const CMatrix& X) const;
string getNoiseName() const
{
return noise.getNoiseName();
}
// Gradient routines
void updateCovGradient(int index) const;
inline void setTerminate(const bool val)
{
terminate = val;
}
inline bool isTerminate() const
{
return terminate;
}
void updateNuG();
// Update K with the kernel computed from the training points.
void updateK() const;
// Update invK with the inverse of the kernel plus beta terms computed from the training points.
void updateInvK(const int index=0) const;
// Compute the approximation to the log likelihood.
double approxLogLikelihood() const;
// Compute the gradients of the approximation wrt parameters.
void approxLogLikelihoodGradient(CMatrix& g) const;
void optimise(const int maxIters=15, const int kernIters=100, const int noiseIters=100);
bool equals(const CPpa& model, const double tol=ndlutil::MATCHTOL) const;
void display(ostream& os) const;
inline int getOptNumParams() const
{
return kern.getNumParams();
}
void getOptParams(CMatrix& param) const
{
kern.getTransParams(param);
}
void setOptParams(const CMatrix& param)
{
kern.setTransParams(param);
}
string getType() const
{
return type;
}
void setType(const string name)
{
type = name;
}
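// The optimisation objective is the negative approximate log likelihood.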
void computeObjectiveGradParams(CMatrix& g) const
{
approxLogLikelihoodGradient(g);
g.negate();
}
double computeObjectiveVal() const
{
return -approxLogLikelihood();
}
#ifdef _NDLMATLAB
mxArray* toMxArray() const;
void fromMxArray(const mxArray* matlabArray);
#endif
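// Reference to the input data.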
const CMatrix& X;
int getNumTrainData() const
{
return numTrainData;
}
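// In PPA the active set is the full training set.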
int getNumActiveData() const
{
return numTrainData;
}
double getBetaVal(const int i, const int j) const
{
return beta.getVal(i, j);
}
int getNumProcesses() const
{
return numTarget;
}
int getNumInputs() const
{
return activeX.getCols();
}
double getTrainingX(const int i, const int j) const
{
return activeX.getVal(i, j);
}
int getTrainingPoint(const int i) const
{
return i;
}
// Arguably these are associated with the noise model.
const CMatrix& y;
CMatrix nu;
CMatrix g;
CMatrix beta;
CMatrix Kstore;
CMatrix wasM;
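// Expectations of f and f^2, and the mean of the q distribution over fBar (see the updateExpectation* methods).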
CMatrix f;
CMatrix ff;
CMatrix fBar;
CMatrix gamma;
// Covariance of q distribution over fbar.
vector<CMatrix*> C;
// These really just provide local storage.
mutable CMatrix covGrad;
mutable CMatrix invK;
mutable double logDetK;
mutable CMatrix K;
mutable CMatrix s;
mutable CMatrix a;
mutable CMatrix ainv;
CMatrix trainY;
CMatrix* M;
CMatrix* L;
CMatrix* Linv;
CKern& kern;
CNoise& noise;
private:
double logLike;
double oldLogLike;
double convergenceTol;
bool terminate;
bool varUpdate;
bool loadedModel;
int numCovStruct;
int numTrainData;
int numTarget;
int numData;
int numIters;
string type;
};
// Functions which operate on the object
void writePpaToStream(const CPpa& model, ostream& out);
void writePpaToFile(const CPpa& model, const string modelFileName, const string comment="");
CPpa* readPpaFromStream(istream& in);
CPpa* readPpaFromFile(const string modelFileName, const int verbosity=2);
#endif