-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathrobustgp.py
195 lines (165 loc) · 5.67 KB
/
robustgp.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
import GPy
import numpy as np
from scipy.stats import norm, chi2
__all__ = ['robust_GP']
def robust_gp_old(X, Y, nsigs=np.repeat(2, 5), callback=None, callback_args=(),
                  **kwargs):
    """
    Robust Gaussian process for data with outliers (legacy implementation).

    Fits a GP, then repeatedly discards points falling outside an
    n-sigma band around the prediction and refits on the survivors.

    Parameters
    ----------
    X: array shape (n, p)
    Y: array shape (n, 1)
        Input data.
    nsigs: array shape (niter,)
        List of n-sigma for iterations, should be a decreasing list.
        Setting the last several n-sigma to be the same can give better
        self-consistency.
        Default: [2, 2, 2, 2, 2]
        Alternative: 2**np.array([1, 0.8, 0.6, 0.4, 0.2, 0, 0, 0])
    callback: callable
        Function for checking the iteration process. It takes
        the iteration number `i` and GPRegression object `gp` as input
        e.g.
            callback=lambda gp, i: print(i, gp.num_data, gp.param_array)
        or
            callback=lambda gp, i: gp.plot()
    callback_args:
        Extra parameters for callback.
    **kwargs:
        GPy.models.GPRegression parameters.

    Returns
    -------
    gp:
        GPy.models.GPRegression object fitted on the final inlier subset.
    """
    n, p = Y.shape
    if p != 1:
        raise ValueError("Y is expected in shape (n, 1).")
    if (np.asarray(nsigs) <= 0).any():
        raise ValueError("nsigs should be positive array.")
    if (np.diff(nsigs) > 0).any():
        raise ValueError("nsigs should be decreasing array.")

    # Initial fit on the full data set.
    gp = GPy.models.GPRegression(X, Y, **kwargs)
    gp.optimize()
    if callback is not None:
        callback(gp, 0, *callback_args)

    keep_prev = None
    for it, nsig in enumerate(nsigs):
        mean, var = gp.predict(X)
        if it > 0:
            # Inflate the variance by the truncation consistency factor.
            # Reference: Croux & Haesbroeck 1999.
            alpha = 2 * norm.cdf(nsigs[it - 1]) - 1
            var = var * (alpha / chi2(p + 2).cdf(chi2(p).ppf(alpha)))
        halfwidth = var**0.5 * nsig
        keep = ((Y >= mean - halfwidth) & (Y <= mean + halfwidth)).ravel()
        # Stop once the final n-sigma level is reached and the inlier set
        # no longer changes between iterations.
        if it > 0 and nsigs[it - 1] == nsigs[-1] and (keep == keep_prev).all():
            break
        keep_prev = keep
        gp = GPy.models.GPRegression(X[keep], Y[keep], **kwargs)
        gp.optimize()
        if callback is not None:
            callback(gp, it + 1, *callback_args)
    return gp
def robust_GP(X, Y, alpha1=0.50, alpha2=0.95, alpha3=0.95,
              niter0=0, niter1=10, niter2=1, exact=True,
              callback=None, callback_args=(),
              **kwargs):
    """
    Robust Gaussian process for data with outliers.

    Three steps:
        1. contraction
        2. refinement
        3. outlier detection

    Parameters
    ----------
    X: array shape (n, p)
    Y: array shape (n, 1)
        Input data.
    alpha1, alpha2:
        Coverage fraction used in contraction step and refinement step respectively.
    alpha3:
        Outlier threshold.
    niter0:
        Extra iterations run before the contraction step proper; during
        these the coverage fraction is annealed from ~1 down to alpha1.
    niter1, niter2:
        Maximum iteration allowed in contraction step and refinement step respectively.
    exact:
        Currently unused; accepted for interface compatibility.
    callback: callable
        Function for checking the iteration process. It takes
        the GPRegression object `gp`, consistency factor and iteration number `i` as input
        e.g.
            callback=lambda gp, c, i: print(i, gp.num_data, gp.param_array)
        or
            callback=lambda gp, c, i: gp.plot()
    callback_args:
        Extra parameters for callback.
    **kwargs:
        GPy.core.GP parameters.

    Returns
    -------
    gp:
        GPy.core.GP object fitted on the final subset.
    consistency:
        Consistency factor applied to the predicted variance.
    score:
        array shape (n,). Robust distance sqrt((Y - mean)^2 / (var * consistency))
        of every point under the final model.
    ix_out:
        Boolean index for outliers.
    """
    n, p = Y.shape
    if p != 1:
        raise ValueError("Y is expected in shape (n, 1).")
    # Defaults so a bare GPy.core.GP can be constructed without the caller
    # supplying a likelihood/kernel explicitly.
    kwargs.setdefault('likelihood', GPy.likelihoods.Gaussian(variance=1.0))
    kwargs.setdefault('kernel', GPy.kern.RBF(X.shape[1]))
    kwargs.setdefault('name', 'Robust GP regression')

    # First iteration: fit on all data, no consistency correction yet.
    gp = GPy.core.GP(X, Y, **kwargs)
    gp.optimize()
    consistency = 1
    mean, var = gp.predict(X)
    # Squared, variance-normalized residual of every point (shape (n,)).
    dist = np.ravel((Y - mean)**2 / (var))
    if callback is not None:
        callback(gp, consistency, 0, *callback_args)

    ix_old = None
    niter1 = niter0 + niter1  # total contraction-loop iterations incl. warm-up

    # Contraction step: repeatedly refit on the alpha_-fraction of points
    # with the smallest robust distances.
    for i in range(niter1):
        if i < niter0:
            # Anneal the coverage fraction from ~1 down to alpha1.
            alpha_ = alpha1 + (1 - alpha1) * ((niter0 - 1 - i) / niter0)
        else:
            alpha_ = alpha1
        # Index of the h-th smallest distance: the subset threshold.
        h = min(int(np.ceil(n * alpha_)), n) - 1
        dist_th = np.partition(dist, h)[h]
        eta_sq1 = chi2(p).ppf(alpha_)
        ix_sub = dist <= dist_th
        if (i > niter0) and (ix_sub == ix_old).all():
            break  # converged
        ix_old = ix_sub
        gp = GPy.core.GP(X[ix_sub], Y[ix_sub], **kwargs)
        gp.optimize()
        # Consistency factor for truncation at coverage alpha_
        # (Croux & Haesbroeck 1999 style correction).
        consistency = alpha_ / chi2(p + 2).cdf(eta_sq1)
        mean, var = gp.predict(X)
        dist = np.ravel((Y - mean)**2 / (var * consistency))
        if callback is not None:
            callback(gp, consistency, i + 1, *callback_args)

    # Refinement step: refit on all points within the chi2(alpha2) quantile.
    for i in range(niter1, niter1 + niter2):
        eta_sq2 = chi2(p).ppf(alpha2)
        # NOTE(review): dist is already divided by `consistency`, yet the
        # threshold is multiplied by it here while the outlier test below is
        # not — confirm this asymmetry is intentional.
        ix_sub = dist <= eta_sq2 * consistency
        if (i > niter1) and (ix_sub == ix_old).all():
            break  # converged
        ix_old = ix_sub
        gp = GPy.core.GP(X[ix_sub], Y[ix_sub], **kwargs)
        gp.optimize()
        consistency = alpha2 / chi2(p + 2).cdf(eta_sq2)
        mean, var = gp.predict(X)
        dist = np.ravel((Y - mean)**2 / (var * consistency))
        if callback is not None:
            callback(gp, consistency, i + 1, *callback_args)

    # Outlier detection: flag points beyond the chi2(alpha3) quantile.
    score = dist**0.5
    eta_sq3 = chi2(p).ppf(alpha3)
    ix_out = dist > eta_sq3
    return gp, consistency, score, ix_out