
Commit

x_out in posterior optional; will be defaulted to first entry in output positions
MarcusMNoack committed Nov 19, 2024
1 parent 3263a72 commit dc82d68
Showing 3 changed files with 27 additions and 7 deletions.
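In user-facing terms: the multi-task posterior methods no longer require an explicit `x_out`; when it is omitted, they fall back to a default taken from the first entry of the stored output positions. A minimal sketch of the new call pattern (the two-argument constructor and the random toy data are illustrative assumptions, not part of this commit):

import numpy as np
from fvgp import fvGP

x_data = np.random.rand(10, 5)   # 10 points in a 5d input space
y_data = np.random.rand(10, 2)   # 2 tasks per point
my_fvgp = fvGP(x_data, y_data)

# x_out given explicitly, as before:
f1 = my_fvgp.posterior_mean(np.random.rand(3, 5), x_out=np.array([0, 1]))["f(x)"]
# x_out omitted; now defaults to the first entry of the output positions:
f2 = my_fvgp.posterior_mean(np.random.rand(3, 5))["f(x)"]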
11 changes: 6 additions & 5 deletions fvgp/fvgp.py
@@ -47,8 +47,8 @@ class provides all the methods described for the GP (:py:class:`fvgp.GP`) class.
     y_data : np.ndarray or list
         The values of the data points. Shape (V,No) if `y_data` is an array.
         It is possible that not every entry in `x_data`
-        has all corresponding tasks available. In that case `y_data` can be a list. In that case make sure
-        that every entry in `y_data` has a corresponding `output_position` of the same shape.
+        has all corresponding tasks available. In that case `y_data` can be a list of numpy arrays.
+        In that case make sure that every entry in `y_data` has a corresponding `output_position` of the same shape.
     init_hyperparameters : np.ndarray, optional
         Vector of hyperparameters used to initiate the GP.
         The default is an array of ones with the right length for the anisotropic Matern
@@ -314,10 +314,10 @@ def __init__(
         else: self.input_space_dim = 1

         self.output_num = len(y_data[0])
-        ###check the output dims

-        if isinstance(y_data, np.ndarray) and np.ndim(y_data) == 1:
-            raise ValueError("The output number is 1, you can use the GP class for single-task GPs")
+        if isinstance(y_data, np.ndarray):
+            if np.ndim(y_data) == 1:
+                raise ValueError("The output number is 1, you can use the GP class for single-task GPs")
         if output_positions is None:
             self.output_positions = self._compute_standard_output_positions(len(x_data))
         else:
@@ -356,6 +356,7 @@ def __init__(
             args=args)

         if self.data.Euclidean: assert self.index_set_dim == self.input_space_dim + 1
+        self.posterior.x_out = self.output_positions[0]

     def update_gp_data(
             self,
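The docstring hunk above describes the ragged multi-task layout this class accepts. A hypothetical example of data satisfying it (values, shapes, and task indices are illustrative only):

import numpy as np

x_data = np.random.rand(3, 5)           # 3 points in a 5d input space
y_data = [np.array([1.0, 2.0]),         # point 0: tasks 0 and 1 measured
          np.array([0.5]),              # point 1: only task 1 measured
          np.array([2.0, 3.0])]         # point 2: tasks 0 and 1 measured
output_positions = [np.array([0, 1]),   # same shape as the matching y_data entry
                    np.array([1]),
                    np.array([0, 1])]

With `output_positions` omitted, the constructor computes standard output positions, and the new line at the end of `__init__` seeds `self.posterior.x_out` with the first entry so later posterior calls have a default.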
20 changes: 18 additions & 2 deletions fvgp/gp_posterior.py
@@ -1,6 +1,5 @@
 import numpy as np
 from loguru import logger
-from scipy.sparse import issparse
 from .gp_lin_alg import *


@@ -18,6 +17,7 @@ def __init__(self,
         self.kernel = self.prior_obj.kernel
         self.mean_function = self.prior_obj.mean_function
         self.d_kernel_dx = self.prior_obj.d_kernel_dx
+        self.x_out = None

     def posterior_mean(self, x_pred, hyperparameters=None, x_out=None):
         x_data, y_data, KVinvY = \
@@ -30,8 +30,10 @@ def posterior_mean(self, x_pred, hyperparameters=None, x_out=None):
         else:
             hyperparameters = self.prior_obj.hyperparameters

+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
+
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

         k = self.kernel(x_data, x_pred, hyperparameters)
@@ -54,6 +56,7 @@ def posterior_mean_grad(self, x_pred, hyperparameters=None, x_out=None, directio
         else:
             hyperparameters = self.prior_obj.hyperparameters

+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -86,7 +89,7 @@ def posterior_mean_grad(self, x_pred, hyperparameters=None, x_out=None, directio
     ###########################################################################
     def posterior_covariance(self, x_pred, x_out=None, variance_only=False, add_noise=False):
         x_data = self.data_obj.x_data.copy()
-
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -128,6 +131,7 @@ def posterior_covariance(self, x_pred, x_out=None, variance_only=False, add_nois

     def posterior_covariance_grad(self, x_pred, x_out=None, direction=None):
         x_data = self.data_obj.x_data.copy()
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -172,6 +176,7 @@ def joint_gp_prior(self, x_pred, x_out=None):
         x_data, K, prior_mean_vec = (self.data_obj.x_data.copy(),
                                      self.prior_obj.K.copy() + (np.identity(len(self.prior_obj.K)) * 1e-9),
                                      self.prior_obj.m.copy())
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

Expand All @@ -193,6 +198,7 @@ def joint_gp_prior_grad(self, x_pred, direction, x_out=None):
         x_data, K, prior_mean_vec = (self.data_obj.x_data.copy(),
                                      self.prior_obj.K.copy() + (np.identity(len(self.prior_obj.K)) * 1e-9),
                                      self.prior_obj.m.copy())
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

@@ -242,6 +248,7 @@ def gp_entropy(self, x_pred, x_out=None):
         ------
         Entropy : float
         """
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

Expand All @@ -253,6 +260,7 @@ def gp_entropy(self, x_pred, x_out=None):

     ###########################################################################
     def gp_entropy_grad(self, x_pred, direction, x_out=None):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

@@ -290,6 +298,7 @@ def kl_div(self, mu1, mu2, S1, S2):

     ###########################################################################
     def gp_kl_div(self, x_pred, comp_mean, comp_cov, x_out=None):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

@@ -306,6 +315,7 @@ def gp_kl_div(self, x_pred, comp_mean, comp_cov, x_out=None):

     ###########################################################################
     def gp_kl_div_grad(self, x_pred, comp_mean, comp_cov, direction, x_out=None):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

@@ -330,6 +340,7 @@ def mutual_information(self, joint, m1, m2):
     ###########################################################################
     def gp_mutual_information(self, x_pred, x_out=None, add_noise=False):
         x_data, K = self.data_obj.x_data.copy(), self.prior_obj.K.copy() + (np.identity(len(self.prior_obj.K)) * 1e-9)
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -345,6 +356,7 @@ def gp_mutual_information(self, x_pred, x_out=None, add_noise=False):
     ###########################################################################
     def gp_total_correlation(self, x_pred, x_out=None, add_noise=False):
         x_data, K = self.data_obj.x_data.copy(), self.prior_obj.K.copy() + (np.identity(len(self.prior_obj.K)) * 1e-9)
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -362,6 +374,7 @@ def gp_total_correlation(self, x_pred, x_out=None, add_noise=False):

     ###########################################################################
     def gp_relative_information_entropy(self, x_pred, x_out=None, add_noise=False):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -377,6 +390,7 @@ def gp_relative_information_entropy(self, x_pred, x_out=None, add_noise=False):

     ###########################################################################
     def gp_relative_information_entropy_set(self, x_pred, x_out=None, add_noise=False):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         x_orig = x_pred.copy()
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)
@@ -390,6 +404,7 @@ def gp_relative_information_entropy_set(self, x_pred, x_out=None, add_noise=Fals

     ###########################################################################
     def posterior_probability(self, x_pred, comp_mean, comp_cov, x_out=None):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

@@ -415,6 +430,7 @@ def posterior_probability(self, x_pred, comp_mean, comp_cov, x_out=None):
         }

     def posterior_probability_grad(self, x_pred, comp_mean, comp_cov, direction, x_out=None):
+        if x_out is None: x_out = self.x_out
         self._perform_input_checks(x_pred, x_out)
         if x_out is not None: x_pred = self.cartesian_product(x_pred, x_out)

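Every public posterior and information-theory method in this file gains the same one-line fallback. A stripped-down sketch of the pattern in isolation (hypothetical class, not the fvgp source):

class PosteriorSketch:
    def __init__(self, default_x_out):
        # seeded once at construction, e.g. from output_positions[0]
        self.x_out = default_x_out

    def posterior_mean(self, x_pred, x_out=None):
        if x_out is None:        # fall back to the stored default
            x_out = self.x_out
        # ... continue with the cartesian product of x_pred and x_out
        return x_pred, x_out

Because `self.x_out` is initialized to None in `GPPosterior.__init__` and only set by `fvGP`, single-task GPs that never set it keep the old behavior: `x_out` stays None and no cartesian product is formed.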
3 changes: 3 additions & 0 deletions tests/test_fvgp.py
@@ -239,8 +239,11 @@ def mkernel(x1,x2,hps):
     my_fvgp.train(hyperparameter_bounds=np.array([[0.01,1],[0.01,10]]),
                   method = "global", pop_size = 10, tolerance = 0.001, max_iter = 2, dask_client=client, info = True)
     my_fvgp.posterior_mean(np.random.rand(10,5), x_out = np.array([0,1]))["f(x)"]
+    my_fvgp.posterior_mean(np.random.rand(10,5))["f(x)"]
     my_fvgp.posterior_mean_grad(np.random.rand(10,5), x_out = np.array([0,1]))["df/dx"]
+    my_fvgp.posterior_mean_grad(np.random.rand(10,5))["df/dx"]
     my_fvgp.posterior_covariance(np.random.rand(10,5), x_out = np.array([0,1]))["v(x)"]
+    my_fvgp.posterior_covariance(np.random.rand(10,5))["v(x)"]



