Revision 48270681afc13081094f7f398a1e194c6b07ba9b authored by vdutor on 03 January 2018, 17:44:53 UTC, committed by Mark van der Wilk on 03 January 2018, 17:44:53 UTC
* Outline of new expectations code. * Quadrature code now uses TensorFlow shape inference. * General expectations work. * Expectations RBF kern, not tested * Add Identity mean function * General unittests for Expectations * Add multipledispatch package to travis * Update tests_expectations * Expectations of mean functions * Mean function uncertain conditional * Uncertain conditional with mean_function. Tested. * Support for Add and Prod kernels and quadrature fallback decorator * Refactor expectations unittests * Psi stats Linear kernel * Split expectations in different files * Expectation Linear kernel and Linear mean function * Remove None's from expectations api * Removed old ekernels framework * Add multipledispatch to setup file * Work on PR feedback, not finished * Addressed PR feedback * Support for pairwise xKxz * Enable expectations unittests * Renamed `TimeseriesGaussian` to `MarkovGaussian` and added tests. * Rename some variable, plus note for later test of <x Kxz>_q. * Update conditionals.py Add comment * Change order of inputs to (feat, kern) * Stef/expectations (#601) * adding gaussmarkov quad * don't override the markvogaussian in the quadrature * can't test * adding external test * quadrature code done and works for MarkovGauss * MarkovGaussian with quad implemented. All tests pass * Shape comments. * Removed superfluous autoflow functions for kernel expectations * Update kernels.py * Update quadrature.py
1 parent 2182bf0
test_gplvm.py
# Copyright 2017 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import tensorflow as tf
import numpy as np
import gpflow
from gpflow.test_util import GPflowTestCase
from gpflow import kernels
np.random.seed(0)
class TestGPLVM(GPflowTestCase):
    """Smoke tests for the MAP GPLVM: a few optimiser steps must improve the likelihood."""

    def setUp(self):
        # Small random dataset: N points in D observed dimensions.
        self.N = 20  # number of data points
        D = 5  # data dimension
        self.rng = np.random.RandomState(1)
        self.Y = self.rng.randn(self.N, D)
        # Dimensionality of the latent space the model should learn.
        self.Q = 2  # latent dimensions

    def test_optimise(self):
        # Default kernel and latent initialisation.
        with self.test_context():
            model = gpflow.models.GPLVM(self.Y, self.Q)
            log_lik_start = model.compute_log_likelihood()
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=2)
            # Optimisation, however short, should not make the fit worse.
            self.assertTrue(model.compute_log_likelihood() > log_lik_start)

    def test_otherkernel(self):
        # Same check with a non-default (Periodic) kernel and explicit latent init.
        with self.test_context():
            kern = kernels.Periodic(self.Q)
            latent_init = self.rng.rand(self.N, self.Q)
            model = gpflow.models.GPLVM(self.Y, self.Q, latent_init, kern)
            log_lik_start = model.compute_log_likelihood()
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=2)
            self.assertTrue(model.compute_log_likelihood() > log_lik_start)
class TestBayesianGPLVM(GPflowTestCase):
    """Smoke tests for the Bayesian GPLVM: optimisation and prediction consistency."""

    def setUp(self):
        # Small random dataset: N points in D observed dimensions.
        self.N = 20  # number of data points
        self.D = 5  # data dimension
        self.rng = np.random.RandomState(1)
        self.Y = self.rng.randn(self.N, self.D)
        # Number of inducing points used by the sparse approximation.
        self.M = 10  # inducing points

    def test_1d(self):
        # 1-D latent space with explicitly supplied inducing inputs Z.
        with self.test_context():
            Q = 1  # latent dimensions
            kern = kernels.RBF(Q)
            # Column vector of M evenly spaced inducing inputs, shape (M, 1).
            inducing = np.linspace(0, 1, self.M)[:, None]
            model = gpflow.models.BayesianGPLVM(
                X_mean=np.zeros((self.N, Q)),
                X_var=np.ones((self.N, Q)),
                Y=self.Y,
                kern=kern,
                M=self.M,
                Z=inducing)
            log_lik_start = model.compute_log_likelihood()
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=2)
            self.assertTrue(model.compute_log_likelihood() > log_lik_start)

    def test_2d(self):
        # 2-D latent space; let the model pick default Z, initialise X via PCA.
        with self.test_context():
            Q = 2  # latent dimensions
            latent_mean = gpflow.models.PCA_reduce(self.Y, Q)
            kern = kernels.RBF(Q, ARD=False)
            model = gpflow.models.BayesianGPLVM(
                X_mean=latent_mean,
                X_var=np.ones((self.N, Q)),
                Y=self.Y,
                kern=kern,
                M=self.M)
            log_lik_start = model.compute_log_likelihood()
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=2)
            self.assertTrue(model.compute_log_likelihood() > log_lik_start)

            # Prediction: marginal and full-covariance predictions must agree.
            Xtest = self.rng.randn(10, Q)
            mu, var = model.predict_f(Xtest)
            mu_full, var_full = model.predict_f_full_cov(Xtest)
            self.assertTrue(np.allclose(mu_full, mu))
            # The diagonal of each output's full covariance equals the marginal variance.
            for d in range(self.D):
                self.assertTrue(np.allclose(var[:, d], np.diag(var_full[:, :, d])))
# Script entry point: delegate to TensorFlow's test runner.
if __name__ == "__main__":
    tf.test.main()
Computing file changes ...