https://github.com/GPflow/GPflow
Revision 30120a183c81cc5d18af0a00ae35e3ec9e06063a authored by Artem Artemev on 25 November 2017, 12:33:07 UTC, committed by Mark van der Wilk on 25 November 2017, 12:33:07 UTC
* Add a bunch of test for Parameter and DataHolder. Minibatch seed can be changed after cleaning or in defer_build.

* Add dataholder tests.

* Add failure creation tests to data holders.

* Shape and dtype for dataholders work for external tensors.

* According fix in dataholder test.

* Change travis file.

* Experiments with travis.

* Improvements in Parameterized tests.

* Exclude external optimizer from coverage report.

* Improvements in optimizer tests.

* Indent functions in optimizer's tests.

* Increase coverage in Parameterized.

* Increase codecov for Parameterized.

* Developing test coverage further.

* Tests Parameterized feeds.

* Call parameter size in tests.

* 100% for ParamList.

* Minibatch tests.

* Increase coverage for Minibatch.

* Setup.py clean up. Stick to 1.4 tensorflow version. Remove backwards for py2.7.

* Increase coverage for Parameters.

* Increase coverage for Parameters build check.

* Core tests for TensorConverter.

* Code coverage targets.

* Change initialization policy at decors and trainers.

* Check initialization status of variable using tf.is_variable_initialized tensor.

* [#568] Improve initialization status checking performance.

* [#568, #561] Test for Dataset iterators is not possible.

* Increase coverage for Parameter. A method was added that required some tests; another method handled session inputs inconsistently.

* Added more tests for Parameter object covering cases when tensor is trainable and parameter is not. Also added test for Parameter string conversion.
1 parent 01c2aa3
Raw File
Tip revision: 30120a183c81cc5d18af0a00ae35e3ec9e06063a authored by Artem Artemev on 25 November 2017, 12:33:07 UTC
Address issues #568, #561 (#575)
Tip revision: 30120a1
mean_functions.py
# Copyright 2016 James Hensman, alexggmatthews, PabloLeon, Valentine Svensson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import tensorflow as tf
import numpy as np

from . import settings
from .params import Parameter
from .params import Parameterized
from .params import ParamList
from .decors import params_as_tensors


class MeanFunction(Parameterized):
    """
    Abstract base class for mean functions.

    Subclasses implement __call__, which maps a tensor X to a tensor m(X).
    Following the GPflow convention, each row of X is one datum, and each
    row of the output is computed independently from the corresponding
    row of X.

    Mean functions may carry trainable parameters; see the Linear class
    for an example. Instances compose with the `+` and `*` operators,
    producing Additive and Product mean functions respectively.
    """
    def __call__(self, X):
        raise NotImplementedError("Implement the __call__ method for this mean function")

    def __add__(self, other):
        # self + other evaluates as self(X) + other(X)
        return Additive(self, other)

    def __mul__(self, other):
        # self * other evaluates as self(X) * other(X)
        return Product(self, other)


class Zero(MeanFunction):
    """The identically-zero mean function: m(X) = 0 for every row of X."""
    def __call__(self, X):
        # Emit one column of zeros per row of X, using the configured
        # floating-point dtype from settings.
        num_data = tf.shape(X)[0]
        return tf.zeros(tf.stack([num_data, 1]), dtype=settings.tf_float)


class Linear(MeanFunction):
    """
    A linear (affine) mean function: y_i = A x_i + b.
    """
    def __init__(self, A=None, b=None):
        """
        A is a matrix which maps each element of X to Y, b is an additive
        constant.

        If X has N rows and D columns, and Y is intended to have Q columns,
        then A must be D x Q, b must be a vector of length Q.

        Defaults give the 1-D identity map with zero offset.
        """
        MeanFunction.__init__(self)
        if A is None:
            A = np.ones((1, 1))
        if b is None:
            b = np.zeros(1)
        # atleast_2d guards against a 1-D weight vector being passed for A.
        self.A = Parameter(np.atleast_2d(A))
        self.b = Parameter(b)

    @params_as_tensors
    def __call__(self, X):
        # Row-wise affine map: X @ A + b (b broadcasts over rows).
        return tf.matmul(X, self.A) + self.b


class Constant(MeanFunction):
    """
    A constant mean function: y_i = c for every datum.
    """
    def __init__(self, c=None):
        MeanFunction.__init__(self)
        if c is None:
            c = np.zeros(1)
        self.c = Parameter(c)

    @params_as_tensors
    def __call__(self, X):
        # Tile c into one identical row per row of X.
        tiling = tf.stack([tf.shape(X)[0], 1])
        c_row = tf.reshape(self.c, (1, -1))
        return tf.tile(c_row, tiling)


class SwitchedMeanFunction(MeanFunction):
    """
    This class enables to use different (independent) mean_functions respective
    to the data 'label'.
    We assume the 'label' is stored in the extra (last) column of X.
    """
    def __init__(self, meanfunction_list):
        """
        :param meanfunction_list: iterable of MeanFunction instances; the
            i-th entry handles rows whose label column equals i.
        :raises TypeError: if any entry is not a MeanFunction.
        """
        MeanFunction.__init__(self)
        # Validate with a real exception rather than `assert`: asserts are
        # stripped when Python runs with -O, which would silently skip
        # this check and fail later with an obscure graph-building error.
        for m in meanfunction_list:
            if not isinstance(m, MeanFunction):
                raise TypeError("meanfunction_list elements must be MeanFunction instances, "
                                "got {}".format(type(m).__name__))
        self.meanfunction_list = ParamList(meanfunction_list)
        self.num_meanfunctions = len(self.meanfunction_list)

    @params_as_tensors
    def __call__(self, X):
        # Extract the label column as integer partition indices.
        ind = tf.gather(tf.transpose(X), tf.shape(X)[1]-1)  # ind = X[:,-1]
        ind = tf.cast(ind, tf.int32)
        # Drop the label column, keeping only the input features.
        X = tf.transpose(tf.gather(tf.transpose(X), tf.range(0, tf.shape(X)[1]-1)))  # X = X[:,:-1]

        # split up X into chunks corresponding to the relevant likelihoods
        x_list = tf.dynamic_partition(X, ind, self.num_meanfunctions)
        # apply the likelihood-function to each section of the data
        results = [m(x) for x, m in zip(x_list, self.meanfunction_list)]
        # stitch the results back together in the original row order
        partitions = tf.dynamic_partition(tf.range(0, tf.size(ind)), ind, self.num_meanfunctions)
        return tf.dynamic_stitch(partitions, results)


class Additive(MeanFunction):
    """The element-wise sum of two mean functions: m(X) = m1(X) + m2(X)."""
    def __init__(self, first_part, second_part):
        MeanFunction.__init__(self)
        self.add_1 = first_part
        self.add_2 = second_part

    def __call__(self, X):
        # Evaluate each part on the same inputs, then sum element-wise.
        left = self.add_1(X)
        right = self.add_2(X)
        return tf.add(left, right)


class Product(MeanFunction):
    """The element-wise product of two mean functions: m(X) = m1(X) * m2(X)."""
    def __init__(self, first_part, second_part):
        MeanFunction.__init__(self)
        self.prod_1 = first_part
        self.prod_2 = second_part

    def __call__(self, X):
        # Evaluate each factor on the same inputs, then multiply element-wise.
        left = self.prod_1(X)
        right = self.prod_2(X)
        return tf.multiply(left, right)
back to top