Revision 12243a6a2a325f823704be7c72c83e222dd3b53b authored by st-- on 02 June 2020, 12:16:24 UTC, committed by GitHub on 02 June 2020, 12:16:24 UTC
Doc improvement: Fixes some typos in the Scipy docstring and adds a note on scipy minimize()'s callback argument vs GPflow Scipy().minimize()'s step_callback. Cleans up types in the module. Backwards-incompatible change: the `step_callback` that can be passed to the Scipy.minimize() method is changed to take its arguments by position, not name; in line with the StepCallback type signature (and in line with Python's typing.Callable documentation, which suggests that callbacks should take arguments preferentially by position, not name).
1 parent 89af61e
linears.py
import tensorflow as tf
from ..base import Parameter
from ..utilities import positive
from .base import Kernel
class Linear(Kernel):
    """
    The linear kernel. Functions drawn from a GP with this kernel are linear,
    i.e. f(x) = cx. The kernel equation is

        k(x, y) = σ²xy

    where σ² is the variance parameter.
    """

    def __init__(self, variance=1.0, active_dims=None):
        """
        :param variance: the (initial) value for the variance parameter(s);
            to induce ARD behaviour this must be initialised as an array of
            the same length as the number of active dimensions, e.g. [1., 1., 1.]
        :param active_dims: a slice or list specifying which columns of X are used
        """
        super().__init__(active_dims)
        self.variance = Parameter(variance, transform=positive())
        self._validate_ard_active_dims(self.variance)

    @property
    def ard(self) -> bool:
        """
        Whether ARD behaviour is active (i.e. variance is a vector with one
        entry per active dimension rather than a scalar).
        """
        return self.variance.shape.ndims > 0

    def K(self, X, X2=None):
        # Scale X by the (possibly per-dimension) variance once, then contract
        # over the feature axis against either X itself or X2.
        scaled_X = X * self.variance
        if X2 is None:
            return tf.matmul(scaled_X, X, transpose_b=True)
        return tf.tensordot(scaled_X, X2, [[-1], [-1]])

    def K_diag(self, X):
        # Diagonal of K(X, X): sum over the feature axis of σ² x².
        return tf.reduce_sum(self.variance * tf.square(X), axis=-1)
class Polynomial(Linear):
    """
    The Polynomial kernel. Functions drawn from a GP with this kernel are
    polynomials of degree `d`. The kernel equation is

        k(x, y) = (σ²xy + γ)ᵈ

    where:
    σ² is the variance parameter,
    γ is the offset parameter,
    d is the degree parameter.
    """

    def __init__(self, degree=3.0, variance=1.0, offset=1.0, active_dims=None):
        """
        :param degree: the degree of the polynomial
        :param variance: the (initial) value for the variance parameter(s);
            to induce ARD behaviour this must be initialised as an array of
            the same length as the number of active dimensions, e.g. [1., 1., 1.]
        :param offset: the offset of the polynomial
        :param active_dims: a slice or list specifying which columns of X are used
        """
        super().__init__(variance, active_dims)
        # NOTE(review): degree is stored as a plain attribute, not a Parameter —
        # presumably so it is not treated as a trainable variable; confirm.
        self.degree = degree
        self.offset = Parameter(offset, transform=positive())

    def K(self, X, X2=None):
        # Linear kernel shifted by the offset, raised to the (fixed) degree.
        linear_part = super().K(X, X2)
        return (linear_part + self.offset) ** self.degree

    def K_diag(self, X):
        # Same transformation applied to the diagonal of the linear kernel.
        linear_diag = super().K_diag(X)
        return (linear_diag + self.offset) ** self.degree
Computing file changes ...