Revision 48270681afc13081094f7f398a1e194c6b07ba9b authored by vdutor on 03 January 2018, 17:44:53 UTC, committed by Mark van der Wilk on 03 January 2018, 17:44:53 UTC
* Outline of new expectations code. * Quadrature code now uses TensorFlow shape inference. * General expectations work. * Expectations RBF kern, not tested * Add Identity mean function * General unittests for Expectations * Add multipledispatch package to travis * Update tests_expectations * Expectations of mean functions * Mean function uncertain conditional * Uncertain conditional with mean_function. Tested. * Support for Add and Prod kernels and quadrature fallback decorator * Refactor expectations unittests * Psi stats Linear kernel * Split expectations in different files * Expectation Linear kernel and Linear mean function * Remove None's from expectations api * Removed old ekernels framework * Add multipledispatch to setup file * Work on PR feedback, not finished * Addressed PR feedback * Support for pairwise xKxz * Enable expectations unittests * Renamed `TimeseriesGaussian` to `MarkovGaussian` and added tests. * Rename some variable, plus note for later test of <x Kxz>_q. * Update conditionals.py Add comment * Change order of inputs to (feat, kern) * Stef/expectations (#601) * adding gaussmarkov quad * don't override the markvogaussian in the quadrature * can't test * adding external test * quadrature code done and works for MarkovGauss * MarkovGaussian with quad implemented. All tests pass * Shape comments. * Removed superfluous autoflow functions for kernel expectations * Update kernels.py * Update quadrature.py
1 parent 2182bf0
mean_functions.py
# Copyright 2016 James Hensman, alexggmatthews, PabloLeon, Valentine Svensson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import numpy as np
from . import settings
from .params import Parameter
from .params import Parameterized
from .params import ParamList
from .decors import params_as_tensors
class MeanFunction(Parameterized):
    """
    The base class for all mean functions.

    Subclasses implement __call__, which maps a tensor X to a tensor m(X).
    Following the GPflow convention, each row of X is a single datum and the
    corresponding output row is computed independently of all other rows.

    Mean functions may hold trainable parameters; see Linear for an example.
    """

    def __call__(self, X):
        """Evaluate the mean function at the rows of X. Must be overridden."""
        raise NotImplementedError("Implement the __call__ method for this mean function")

    def __add__(self, other):
        """Return a mean function that is the pointwise sum of self and other."""
        return Additive(self, other)

    def __mul__(self, other):
        """Return a mean function that is the pointwise product of self and other."""
        return Product(self, other)
class Linear(MeanFunction):
    """
    y_i = A x_i + b
    """

    def __init__(self, A=None, b=None):
        """
        A is a matrix mapping each row of X to the corresponding row of Y;
        b is an additive constant.

        If X has N rows and D columns and Y is intended to have Q columns,
        then A must be D x Q and b a vector of length Q.
        Defaults: A = ones((1, 1)), b = zeros(1).
        """
        MeanFunction.__init__(self)
        if A is None:
            A = np.ones((1, 1))
        if b is None:
            b = np.zeros(1)
        # atleast_2d lets callers pass a 1-D array for a single-column map.
        self.A = Parameter(np.atleast_2d(A))
        self.b = Parameter(b)

    @params_as_tensors
    def __call__(self, X):
        # X @ A + b, with b broadcast over the rows of the product.
        return tf.matmul(X, self.A) + self.b
class Identity(Linear):
    """
    y_i = x_i
    """

    def __init__(self, input_dim=None):
        Linear.__init__(self)
        # Only required when A/b must be materialised as tensors
        # (e.g. by the expectations code); __call__ itself never needs it.
        self.input_dim = input_dim

    def __call__(self, X):
        # The identity map: return the input unchanged.
        return X

    def _check_input_dim(self):
        # A and b can only be built when the input dimension is known.
        if self.input_dim is None:
            raise ValueError("An input_dim needs to be specified when using the "
                             "`Identity` mean function in combination with expectations.")

    @property
    def A(self):
        self._check_input_dim()
        return tf.eye(self.input_dim, dtype=settings.float_type)

    @property
    def b(self):
        self._check_input_dim()
        return tf.zeros(self.input_dim, dtype=settings.float_type)

    @A.setter
    def A(self, A):
        # Swallow the assignment made by Linear.__init__; A is derived
        # from input_dim rather than stored.
        pass

    @b.setter
    def b(self, b):
        # Same as the A setter: b is derived, not stored.
        pass
class Constant(MeanFunction):
    """
    y_i = c

    A constant mean function: every output row equals the (trainable)
    vector c, independent of the input.
    """
    def __init__(self, c=None):
        """
        :param c: the constant value, a vector of length Q (the number of
            output columns). Defaults to zeros(1).
        """
        MeanFunction.__init__(self)
        c = np.zeros(1) if c is None else c
        self.c = Parameter(c)

    @params_as_tensors
    def __call__(self, X):
        # Tile c to shape (N, Q) where N is the number of rows in X.
        shape = tf.stack([tf.shape(X)[0], 1])
        return tf.tile(tf.reshape(self.c, (1, -1)), shape)
class Zero(Constant):
    """
    y_i = 0

    A zero mean function with no trainable parameters.
    """
    def __init__(self, output_dim=1):
        """
        :param output_dim: the number of output columns Q.
        """
        Constant.__init__(self)
        self.output_dim = output_dim
        # The constant parameter set up by Constant.__init__ is unused:
        # the output is always zero, so remove it from the parameter tree.
        del self.c

    def __call__(self, X):
        # Use settings.float_type for consistency with the rest of this file
        # (Identity.A / Identity.b); `settings.tf_float` looks like the older
        # alias for the same dtype — confirm against gpflow.settings.
        return tf.zeros((tf.shape(X)[0], self.output_dim), dtype=settings.float_type)
class SwitchedMeanFunction(MeanFunction):
    """
    This class enables to use different (independent) mean_functions respective
    to the data 'label'.
    We assume the 'label' is stored in the extra column of X.
    """
    def __init__(self, meanfunction_list):
        # :param meanfunction_list: one MeanFunction per label value; rows of X
        #     whose last column equals k are handled by meanfunction_list[k].
        MeanFunction.__init__(self)
        for m in meanfunction_list:
            assert isinstance(m, MeanFunction)
        self.meanfunction_list = ParamList(meanfunction_list)
        self.num_meanfunctions = len(self.meanfunction_list)
    @params_as_tensors
    def __call__(self, X):
        # Pull the integer label out of the last column, then drop that column
        # so only the actual features are passed to each mean function.
        ind = tf.gather(tf.transpose(X), tf.shape(X)[1]-1) # ind = X[:,-1]
        ind = tf.cast(ind, tf.int32)
        X = tf.transpose(tf.gather(tf.transpose(X), tf.range(0, tf.shape(X)[1]-1))) # X = X[:,:-1]
        # split up X into chunks corresponding to the relevant likelihoods
        x_list = tf.dynamic_partition(X, ind, self.num_meanfunctions)
        # apply the likelihood-function to each section of the data
        results = [m(x) for x, m in zip(x_list, self.meanfunction_list)]
        # stitch the results back together
        # dynamic_stitch restores the original row order: `partitions` splits
        # the row indices with the same label partitioning used on the data.
        partitions = tf.dynamic_partition(tf.range(0, tf.size(ind)), ind, self.num_meanfunctions)
        return tf.dynamic_stitch(partitions, results)
class Additive(MeanFunction):
    """
    The pointwise sum of two mean functions: m(X) = m1(X) + m2(X).
    """

    def __init__(self, first_part, second_part):
        MeanFunction.__init__(self)
        self.add_1 = first_part
        self.add_2 = second_part

    def __call__(self, X):
        # Evaluate each constituent and add the results elementwise.
        left = self.add_1(X)
        right = self.add_2(X)
        return tf.add(left, right)
class Product(MeanFunction):
    """
    The pointwise product of two mean functions: m(X) = m1(X) * m2(X).
    """

    def __init__(self, first_part, second_part):
        MeanFunction.__init__(self)
        self.prod_1 = first_part
        self.prod_2 = second_part

    def __call__(self, X):
        # Evaluate each constituent and multiply the results elementwise.
        left = self.prod_1(X)
        right = self.prod_2(X)
        return tf.multiply(left, right)
Computing file changes ...