Revision 6a455db8f33ceefa0f8c544dc427ba6402f85588 authored by st-- on 07 January 2020, 21:15:01 UTC, committed by GitHub on 07 January 2020, 21:15:01 UTC
Allows gpflow2 to work better with tf.function() when static shapes are unknown (e.g. when minibatching). Closes #1179 

Co-authored-by: marcoadurno <marco.adurno@gmail.com>
1 parent fb66fb5
test_model.py
# Copyright 2019 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gpflow
import numpy as np
import pytest
from gpflow.utilities import set_trainable

rng = np.random.RandomState(0)


class Data:
    N = 10
    D = 1
    X = rng.rand(N, D)
    Y = rng.rand(N, 1)
    ls = 2.0
    var = 1.0


# ------------------------------------------
# Fixtures
# ------------------------------------------


@pytest.fixture
def model():
    return gpflow.models.GPR(
        (Data.X, Data.Y),
        kernel=gpflow.kernels.SquaredExponential(lengthscale=Data.ls, variance=Data.var),
    )


def test_non_trainable_model_objective(model):
    """
    Checks that we can still compute the objective of a model that has no
    trainable parameters whatsoever (regression test for bug in log_prior()).
    In this case we have no priors, so log_prior should be zero to add no
    contribution to the objective.
    """
    set_trainable(model, False)

    _ = model.log_marginal_likelihood()
    assert model.log_prior() == 0.0
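

# ------------------------------------------
# Sketch: objective inside tf.function with unknown static shapes
# ------------------------------------------
# The commit this revision belongs to is about making GPflow 2 work better
# with tf.function() when static shapes are unknown (e.g. when minibatching).
# The test below is a minimal sketch of that scenario, not part of the
# original suite: it assumes the gpflow.models.SVGP constructor,
# gpflow.inducing_variables.InducingPoints, and the model.elbo(data) API,
# and uses an input_signature with an unknown leading (batch) dimension.
import tensorflow as tf


def test_objective_in_tf_function_with_unknown_batch_shape_sketch():
    model = gpflow.models.SVGP(
        kernel=gpflow.kernels.SquaredExponential(),
        likelihood=gpflow.likelihoods.Gaussian(),
        inducing_variable=gpflow.inducing_variables.InducingPoints(Data.X[:5].copy()),
    )

    @tf.function(
        input_signature=[
            tf.TensorSpec(shape=[None, Data.D], dtype=tf.float64),
            tf.TensorSpec(shape=[None, 1], dtype=tf.float64),
        ]
    )
    def objective(X, Y):
        # Negative ELBO on an externally supplied minibatch; inside the traced
        # function the batch dimension is statically unknown (None).
        return -model.elbo((X, Y))

    # Feed a minibatch; numpy arrays are converted to tensors matching the spec.
    value = objective(Data.X[:4], Data.Y[:4])
    assert np.isfinite(value.numpy())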