https://github.com/GPflow/GPflow
Revision ad6e03114fa6903585c31da1528a47f4dcb2049d authored by Vincent Dutordoir on 15 September 2020, 15:37:25 UTC, committed by GitHub on 15 September 2020, 15:37:25 UTC
* HeteroskedasticLikelihood base class draft * fixup * cleanup * cleanup heteroskedastic * multioutput likelihood WIP * Notebook exemplifying HeteroskedasticTFPDistribution usage (#1462) * fixes * typo fix; reshaping fix * notebook showing how to use HeteroskedasticTFPDistribution likelihood * converting to .pct.py format * removed .ipynb * better descriptions * black auto-formatting Co-authored-by: Gustavo Carvalho <gustavo.carvalho@delfosim.com> * note and bugfix * add comment * Adding heteroskedastic tests (#1508) These tests ensure that heteroskedastic likelihood with a constant variance, will give the same results as a Gaussian likelihood with the same variance. * testing * added QuadratureLikelihood to base, refactored ScalarLikelihood to use it * fix * using the first dimension to hold the quadrature summation * adapting ndiagquad wrapper * merged with gustavocmv/quadrature-change-shape * removed unecessary tf.init_scope * removed print and tf.print * removed print and tf.print * Type annotations Co-authored-by: Vincent Dutordoir <dutordoirv@gmail.com> * Work * Fix test * Remove multioutput from PR * Fix notebook * Add student t test * More tests * Copyright * Removed NDiagGHQuadratureLikelihood class in favor of non-abstract QuadratureLikelihood * _set_latent_and_observation_dimension_eagerly * n_gh ---> num_gauss_hermite_points * removed NDiagGHQuadratureLikelihood from test * black * bugfix * removing NDiagGHQuadratureLikelihood from test * fixed bad commenting * black * refactoring scalar likelihood * adding dtype casts to quadrature * black * small merging fixes * DONE: swap n_gh for num_gauss_hermite_points * black Co-authored-by: ST John <st@prowler.io> Co-authored-by: gustavocmv <47801305+gustavocmv@users.noreply.github.com> Co-authored-by: Gustavo Carvalho <gustavo.carvalho@delfosim.com> Co-authored-by: st-- <st--@users.noreply.github.com> Co-authored-by: joshuacoales-pio <47976939+joshuacoales-pio@users.noreply.github.com>
1 parent 799b659
Tip revision: ad6e03114fa6903585c31da1528a47f4dcb2049d authored by Vincent Dutordoir on 15 September 2020, 15:37:25 UTC
Multi Latent Likelihoods using new quadrature Likelihoods (#1559)
Multi Latent Likelihoods using new quadrature Likelihoods (#1559)
Tip revision: ad6e031
test_dynamic_shapes.py
# Copyright 2019 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import tensorflow as tf
from numpy.testing import assert_allclose
import gpflow
from gpflow.config import default_jitter, default_float
from gpflow.mean_functions import Constant
# Fixed-seed random state shared by the dataset below; seeding makes every
# test run see the same synthetic data.
rng = np.random.RandomState(0)

# NOTE: the three rng draws below (rand, randn, rand) must stay in this order
# so the generated data is bit-identical across refactors.
_inputs = rng.rand(20, 1) * 10
_targets = np.sin(_inputs) + 0.9 * np.cos(_inputs * 1.6) + rng.randn(*_inputs.shape) * 0.8


class Datum:
    """Synthetic regression/classification dataset shared by the tests."""

    X = _inputs
    Y = np.tile(_targets, 2)  # two identical output columns
    Xtest = rng.rand(10, 1) * 10
    data = (X, Y)
    # for classification: single-column targets
    Yc = Y[:, :1]
    cdata = (X, Yc)
@pytest.mark.parametrize("whiten", [True, False])
@pytest.mark.parametrize("q_diag", [True, False])
def test_svgp(whiten, q_diag):
    """SVGP regression should train through a tf.function with unknown shapes.

    Smoke test: builds an SVGP model, wraps its ELBO in a tf.function whose
    input signature leaves all tensor shapes unspecified (shape=None), and
    checks that a few Scipy optimizer iterations complete without raising.
    """
    model = gpflow.models.SVGP(
        gpflow.kernels.SquaredExponential(),
        gpflow.likelihoods.Gaussian(),
        inducing_variable=Datum.X.copy(),
        q_diag=q_diag,
        whiten=whiten,
        # use the name imported at the top of the file rather than the
        # fully-qualified path, so the `Constant` import is not dead code
        mean_function=Constant(),
        num_latent_gps=Datum.Y.shape[1],
    )
    gpflow.set_trainable(model.inducing_variable, False)

    # test with explicitly unknown shapes:
    tensor_spec = tf.TensorSpec(shape=None, dtype=default_float())
    elbo = tf.function(model.elbo, input_signature=[(tensor_spec, tensor_spec)])

    @tf.function
    def model_closure():
        return -elbo(Datum.data)

    opt = gpflow.optimizers.Scipy()
    # simply test whether it runs without erroring...:
    opt.minimize(
        model_closure,
        variables=model.trainable_variables,
        options=dict(maxiter=3),
        compile=True,
    )
def test_multiclass():
    """Smoke test: a MultiClass SVGP trains through a shape-agnostic tf.function."""
    n_classes = 3
    model = gpflow.models.SVGP(
        gpflow.kernels.SquaredExponential(),
        gpflow.likelihoods.MultiClass(num_classes=n_classes),
        inducing_variable=Datum.X.copy(),
        num_latent_gps=n_classes,
    )
    gpflow.set_trainable(model.inducing_variable, False)

    # Leave every tensor shape unknown so the traced function must cope
    # with fully dynamic shapes.
    spec = tf.TensorSpec(shape=None, dtype=default_float())
    compiled_elbo = tf.function(model.elbo, input_signature=[(spec, spec)],)

    @tf.function
    def objective():
        return -compiled_elbo(Datum.cdata)

    # We only check that a few optimization steps complete without raising.
    gpflow.optimizers.Scipy().minimize(
        objective, variables=model.trainable_variables, options=dict(maxiter=3), compile=True,
    )
Computing file changes ...