https://github.com/GPflow/GPflow
Revision deb4508578f7223fa1ad5e3b6458626c4b41ef09 authored by Eric Hammy on 17 October 2019, 14:46:42 UTC, committed by GitHub on 17 October 2019, 14:46:42 UTC
1. Fix hidden bug in SGPR 2. Add the sgpr.compute_qu method from gpflow1 1. [Bug]. SGPR likelihoods were previously using full-rank matrices instead of diagonal ones in both the upper-bound and likelihood calculations, i.e. `Kdiag` was not actually "diag". This error was being masked by the intentional deactivation of tests comparing the SGPR to the GPR, and by what appears to be a hack to make the upper-bound tests pass. 2. [Migration]. Fixing the above broke another test, originally used for sgpr.compute_qu. The method sgpr.compute_qu had not been migrated from gpflow1, and a test that was meant to check it had been patched up to pass, erroneously. After speaking to @markvdw, we concluded this method is useful, in particular for comparison against the SVGP model. The test has been fixed and the method ported to gpflow2.
1 parent 3b2a2ee
Tip revision: deb4508578f7223fa1ad5e3b6458626c4b41ef09 authored by Eric Hammy on 17 October 2019, 14:46:42 UTC
Fix hidden bug in SGPR (#1106)
Fix hidden bug in SGPR (#1106)
Tip revision: deb4508
test_multioutput_features.py
import numpy as np
import pytest
import tensorflow as tf
import gpflow
import gpflow.inducing_variables.mo_inducing_variables as mf
import gpflow.kernels.mo_kernels as mk
from gpflow.covariances import mo_kufs, mo_kuus
rng = np.random.RandomState(9911)
# ------------------------------------------
# Helpers
# ------------------------------------------
def make_kernel():
return gpflow.kernels.SquaredExponential()
def make_kernels(num):
return [make_kernel() for _ in range(num)]
def make_ip():
x = rng.permutation(Datum.X)
return gpflow.inducing_variables.InducingPoints(x[:Datum.M, ...])
def make_ips(num):
return [make_ip() for _ in range(num)]
# ------------------------------------------
# Data classes: storing constants
# ------------------------------------------
class Datum:
D = 1
L = 2
P = 3
M = 10
N = 100
W = rng.randn(P, L)
X = rng.randn(N)[:, None]
Xnew = rng.randn(N)[:, None]
multioutput_inducing_variable_list = [
mf.SharedIndependentInducingVariables(make_ip()),
mf.SeparateIndependentInducingVariables(make_ips(Datum.P))
]
multioutput_kernel_list = [
mk.SharedIndependent(make_kernel(), Datum.P),
mk.SeparateIndependent(make_kernels(Datum.L)),
mk.LinearCoregionalization(make_kernels(Datum.L), Datum.W)
]
@pytest.mark.parametrize('inducing_variable', multioutput_inducing_variable_list)
@pytest.mark.parametrize('kernel', multioutput_kernel_list)
def test_kuu(inducing_variable, kernel):
Kuu = mo_kuus.Kuu(inducing_variable, kernel, jitter=1e-9)
tf.linalg.cholesky(Kuu)
@pytest.mark.parametrize('inducing_variable', multioutput_inducing_variable_list)
@pytest.mark.parametrize('kernel', multioutput_kernel_list)
def test_kuf(inducing_variable, kernel):
Kuf = mo_kufs.Kuf(inducing_variable, kernel, Datum.Xnew)
@pytest.mark.parametrize('fun', [mo_kuus.Kuu, mo_kufs.Kuf])
def test_mixed_shared(fun):
inducing_variable = mf.SharedIndependentInducingVariables(make_ip())
kernel = mk.LinearCoregionalization(make_kernels(Datum.L), Datum.W)
if fun is mo_kuus.Kuu:
t = tf.linalg.cholesky(fun(inducing_variable, kernel, jitter=1e-9))
else:
t = fun(inducing_variable, kernel, Datum.Xnew)
print(t.shape)
Computing file changes ...