FEAT: Add Neural Likelihood Estimation (NLE) likelihood #827

Draft · wants to merge 17 commits into main
3 changes: 3 additions & 0 deletions bilby/core/likelihood/__init__.py
@@ -0,0 +1,3 @@
from .base import *
from .analytic import *
from .simulation_based_inference import *
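
The package split is intended to be import-compatible: `bilby.core.likelihood` keeps exposing the same public names through these wildcard imports. A quick sketch of what that preserves (assuming neither submodule defines a restrictive `__all__`):

# Imports written against the old single-module layout keep working
# after the move to a package, e.g.:
from bilby.core.likelihood import Likelihood, JointLikelihood, CeleriteLikelihood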
308 changes: 2 additions & 306 deletions bilby/core/likelihood.py → bilby/core/likelihood/analytic.py
@@ -1,95 +1,9 @@
import copy

import numpy as np
from scipy.special import gammaln, xlogy
from scipy.stats import multivariate_normal

from .utils import infer_parameters_from_function, infer_args_from_function_except_n_args


class Likelihood(object):

def __init__(self, parameters=None):
"""Empty likelihood class to be subclassed by other likelihoods

Parameters
==========
parameters: dict
A dictionary of the parameter names and associated values
"""
self.parameters = parameters
self._meta_data = None
self._marginalized_parameters = []

def __repr__(self):
return self.__class__.__name__ + '(parameters={})'.format(self.parameters)

def log_likelihood(self):
"""

Returns
=======
float
"""
return np.nan

def noise_log_likelihood(self):
"""

Returns
=======
float
"""
return np.nan

def log_likelihood_ratio(self):
"""Difference between log likelihood and noise log likelihood

Returns
=======
float
"""
return self.log_likelihood() - self.noise_log_likelihood()

@property
def meta_data(self):
return getattr(self, '_meta_data', None)

@meta_data.setter
def meta_data(self, meta_data):
if isinstance(meta_data, dict):
self._meta_data = meta_data
else:
raise ValueError("The meta_data must be an instance of dict")

@property
def marginalized_parameters(self):
return self._marginalized_parameters
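
Since `Likelihood` is an empty base class meant to be subclassed, a minimal sketch of the intended pattern may be useful (the `GaussianMeanLikelihood` name and its unit-variance model are illustrative, not part of this diff):

import numpy as np

from bilby.core.likelihood import Likelihood


class GaussianMeanLikelihood(Likelihood):
    def __init__(self, data):
        # Register the sampled parameter names; a sampler fills in the values.
        super().__init__(parameters={"mu": None})
        self.data = np.asarray(data)

    def log_likelihood(self):
        # Unit-variance Gaussian log-likelihood at the current value of mu.
        residual = self.data - self.parameters["mu"]
        return -0.5 * np.sum(residual**2) - 0.5 * len(self.data) * np.log(2 * np.pi)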


class ZeroLikelihood(Likelihood):
""" A special test-only class which already returns zero likelihood

Parameters
==========
likelihood: bilby.core.likelihood.Likelihood
A likelihood object to mimic

"""

def __init__(self, likelihood):
super(ZeroLikelihood, self).__init__(dict.fromkeys(likelihood.parameters))
self.parameters = likelihood.parameters
self._parent = likelihood

def log_likelihood(self):
return 0

def noise_log_likelihood(self):
return 0

def __getattr__(self, name):
return getattr(self._parent, name)
from .base import Likelihood
from ..utils import infer_parameters_from_function


class Analytical1DLikelihood(Likelihood):
@@ -514,222 +428,4 @@ def log_likelihood(self):
return -np.log(2) + np.logaddexp(self.pdf_1.logpdf(x), self.pdf_2.logpdf(x))


class JointLikelihood(Likelihood):
def __init__(self, *likelihoods):
"""
A likelihood for combining pre-defined likelihoods.
The parameters dict is automatically combined from the parameters dicts
of the given likelihoods. If a parameter initially has different values
across different likelihoods, the value from the last given likelihood
is chosen. This does not matter when using the JointLikelihood for
sampling, because the parameters will be set consistently.

Parameters
==========
*likelihoods: bilby.core.likelihood.Likelihood
likelihoods to be combined parsed as arguments
"""
self.likelihoods = likelihoods
super(JointLikelihood, self).__init__(parameters={})
self.__sync_parameters()

def __sync_parameters(self):
""" Synchronizes parameters between the likelihoods
so that all likelihoods share a single parameter dict."""
for likelihood in self.likelihoods:
self.parameters.update(likelihood.parameters)
for likelihood in self.likelihoods:
likelihood.parameters = self.parameters

@property
def likelihoods(self):
""" The list of likelihoods """
return self._likelihoods

@likelihoods.setter
def likelihoods(self, likelihoods):
likelihoods = copy.deepcopy(likelihoods)
if isinstance(likelihoods, tuple) or isinstance(likelihoods, list):
if all(isinstance(likelihood, Likelihood) for likelihood in likelihoods):
self._likelihoods = list(likelihoods)
else:
raise ValueError('Try setting the JointLikelihood like this\n'
'JointLikelihood(first_likelihood, second_likelihood, ...)')
elif isinstance(likelihoods, Likelihood):
self._likelihoods = [likelihoods]
else:
raise ValueError('Input likelihood is not a list or tuple. You need to set multiple likelihoods.')

def log_likelihood(self):
""" This is just the sum of the log likelihoods of all parts of the joint likelihood"""
return sum([likelihood.log_likelihood() for likelihood in self.likelihoods])

def noise_log_likelihood(self):
""" This is just the sum of the noise likelihoods of all parts of the joint likelihood"""
return sum([likelihood.noise_log_likelihood() for likelihood in self.likelihoods])
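
A hedged usage sketch (`likelihood_a` and `likelihood_b` stand in for any two `Likelihood` instances and are not part of this diff):

# The (copied) component likelihoods share one parameters dict, so a value
# set on the joint object applies to every component.
joint = JointLikelihood(likelihood_a, likelihood_b)
joint.parameters["mu"] = 0.5
total = joint.log_likelihood()  # sum of the component log-likelihoods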


def function_to_celerite_mean_model(func):
from celerite.modeling import Model as CeleriteModel
return _function_to_gp_model(func, CeleriteModel)


def function_to_george_mean_model(func):
from george.modeling import Model as GeorgeModel
return _function_to_gp_model(func, GeorgeModel)


def _function_to_gp_model(func, cls):
class MeanModel(cls):
parameter_names = tuple(infer_args_from_function_except_n_args(func=func, n=1))

def get_value(self, t):
params = {name: getattr(self, name) for name in self.parameter_names}
return func(t, **params)

def compute_gradient(self, *args, **kwargs):
pass

return MeanModel
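
A sketch of how this factory is used through `function_to_celerite_mean_model` (the linear `line` function is an illustrative assumption; requires `celerite`):

def line(t, gradient, intercept):
    return gradient * t + intercept


# The factory inspects the signature of `line`, skipping the first argument
# (the independent variable t), to define the model's parameter names.
LineMeanModel = function_to_celerite_mean_model(line)
mean_model = LineMeanModel(gradient=1.0, intercept=0.0)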


class _GPLikelihood(Likelihood):

def __init__(self, kernel, mean_model, t, y, yerr=1e-6, gp_class=None):
"""
Basic Gaussian Process likelihood interface for `celerite` and `george`.
For `celerite` documentation see: https://celerite.readthedocs.io/en/stable/
For `george` documentation see: https://george.readthedocs.io/en/latest/

Parameters
==========
kernel: Union[celerite.term.Term, george.kernels.Kernel]
`celerite` or `george` kernel. See the respective package documentation for usage.
mean_model: Union[celerite.modeling.Model, george.modeling.Model]
Mean model
t: array_like
The `times` or `x` values of the data set.
y: array_like
The `y` values of the data set.
yerr: float, int, array_like, optional
The error values on the y-values. If a single value is given, it is assumed that the value
applies for all y-values. Default is 1e-6, effectively assuming that no y-errors are present.
gp_class: type, None, optional
The GP class to use. This is set by the child class used to instantiate the GP. It should usually
not be given by the user and is mostly used for testing.
"""
self.kernel = kernel
self.mean_model = mean_model
self.t = np.array(t)
self.y = np.array(y)
self.yerr = np.array(yerr)
self.GPClass = gp_class
self.gp = self.GPClass(kernel=self.kernel, mean=self.mean_model, fit_mean=True, fit_white_noise=True)
self.gp.compute(self.t, yerr=self.yerr)
super().__init__(parameters=self.gp.get_parameter_dict())

def set_parameters(self, parameters):
"""
Safely set a set of parameters to the internal instances of the `gp` and `mean_model`, as well as the
`parameters` dict.

Parameters
==========
parameters: dict, pandas.DataFrame
The set of parameters we would like to set.
"""
for name, value in parameters.items():
try:
self.gp.set_parameter(name=name, value=value)
except ValueError:
pass
self.parameters[name] = value


class CeleriteLikelihood(_GPLikelihood):

def __init__(self, kernel, mean_model, t, y, yerr=1e-6):
"""
Basic Gaussian Process likelihood interface for `celerite`.
For `celerite` documentation see: https://celerite.readthedocs.io/en/stable/

Parameters
==========
kernel: celerite.term.Term
`celerite` kernel. See the `celerite` documentation for usage.
mean_model: celerite.modeling.Model
Mean model
t: array_like
The `times` or `x` values of the data set.
y: array_like
The `y` values of the data set.
yerr: float, int, array_like, optional
The error values on the y-values. If a single value is given, it is assumed that the value
applies for all y-values. Default is 1e-6, effectively assuming that no y-errors are present.
"""
import celerite
super().__init__(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=yerr, gp_class=celerite.GP)

def log_likelihood(self):
"""
Calculate the log-likelihood for the Gaussian process given the current parameters.

Returns
=======
float: The log-likelihood value.
"""
self.gp.set_parameter_vector(vector=np.array(list(self.parameters.values())))
try:
return self.gp.log_likelihood(self.y)
except Exception:
return -np.inf
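
A hedged usage sketch (kernel choice and data are illustrative; requires `celerite` and reuses the `mean_model` built in the factory sketch above):

import numpy as np
from celerite import terms

kernel = terms.RealTerm(log_a=0.0, log_c=0.0)  # simple damped-random-walk term
t = np.linspace(0, 10, 100)
y = t + 0.1 * np.random.randn(100)  # placeholder data
likelihood = CeleriteLikelihood(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=0.1)
print(likelihood.log_likelihood())  # evaluated at the current likelihood.parameters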


class GeorgeLikelihood(_GPLikelihood):

def __init__(self, kernel, mean_model, t, y, yerr=1e-6):
"""
Basic Gaussian Process likelihood interface for `george`.
For `george` documentation see: https://george.readthedocs.io/en/latest/

Parameters
==========
kernel: george.kernels.Kernel
`george` kernel. See the `george` documentation for usage.
mean_model: george.modeling.Model
Mean model
t: array_like
The `times` or `x` values of the data set.
y: array_like
The `y` values of the data set.
yerr: float, int, array_like, optional
The error values on the y-values. If a single value is given, it is assumed that the value
applies for all y-values. Default is 1e-6, effectively assuming that no y-errors are present.
"""
import george
super().__init__(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=yerr, gp_class=george.GP)

def log_likelihood(self):
"""
Calculate the log-likelihood for the Gaussian process given the current parameters.

Returns
=======
float: The log-likelihood value.
"""
for name, value in self.parameters.items():
try:
self.gp.set_parameter(name=name, value=value)
except ValueError:
raise ValueError(f"Parameter {name} not a valid parameter for the GP.")
try:
return self.gp.log_likelihood(self.y)
except Exception:
return -np.inf
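
And the analogous sketch for `george` (kernel and data again illustrative; the mean model is built with the `line` function from the factory sketch above):

import numpy as np
import george

kernel = 1.0 * george.kernels.ExpSquaredKernel(metric=1.0)
mean_model = function_to_george_mean_model(line)(gradient=1.0, intercept=0.0)
t = np.linspace(0, 10, 100)
y = t + 0.1 * np.random.randn(100)  # placeholder data
likelihood = GeorgeLikelihood(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=0.1)
print(likelihood.log_likelihood())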


class MarginalizedLikelihoodReconstructionError(Exception):
pass