-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathregressLinearKernels.py
47 lines (30 loc) · 1.68 KB
/
regressLinearKernels.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import numpy as np
from GPy_ABCD.KernelExpansion.grammar import *
from GPy_ABCD.Util.dataAndPlottingUtil import *
from GPy_ABCD.Util.modelUtil import *
from GPy_ABCD.Kernels.linearKernel import Linear
from GPy_ABCD.Kernels.linearOffsetKernel import LinearWithOffset
from GPy_ABCD.Kernels.baseKernels import C, __USE_LIN_KERNEL_HORIZONTAL_OFFSET
# Experiment: compare plain Linear vs LinearWithOffset kernels on polynomial data.
# Earlier trials (kept out of the run): degree-1 data fit with Linear(1),
# Linear(1) + C() and LinearWithOffset(1); degree-2 data fit with products of
# Linear / LinearWithOffset kernels (with and without an added constant) and
# with ProductKE(['LIN', 'LIN']) / SumKE(['C'], [ProductKE(['LIN', 'LIN'])]).
# Those runs produced the VERDICT at the bottom of this file.

# Cubic test data: roots at x = 12, -18 and 7, with a negated leading coefficient;
# sampled on 201 points in [-20, 20] with scale 1 and noise 20.
def _cubic(x): return - (x - 12) * (x + 18) * (x - 7)
X, Y = generate_data(_cubic, np.linspace(-20, 20, 201), 1, 20)

# Product of three LIN base kernels — a degree-3 polynomial mean structure.
kernel = ProductKE(['LIN', 'LIN', 'LIN']).to_kernel()

# np.seterr(all='raise')  # Uncomment to raise exceptions instead of RuntimeWarnings,
#                         # so the debugger can catch numerical issues during fitting.
mod = fit_GPy_kern(X, Y, kernel, 20, optimizer = GPy_optimisers[0])
# mod = fit_kex(X, Y, correct_k, 10)  # alternative: fit via kernel expression
# model_printout(mod)                 # alternative: textual model summary
plt.show()

# VERDICT: LinearWithOffset is just better: better fits (even accounting for extra params),
# is interpretable (the offsets are just roots),
# and having a single variance per product is even better

# Optional micro-benchmark of the fit call:
# from GPy_ABCD.Util.benchmarking import timethis
# times = timethis(fit_GPy_kern, 5, X, Y, kernel, 20, optimizer = GPy_optimisers[0])
# print(sum(times) / len(times))