Removing cached properties (#13)
* Removed cached properties

* bump version to 0.5.1
williamjameshandley authored Nov 6, 2023
1 parent 810e644 commit 4aeee8f
Showing 3 changed files with 17 additions and 38 deletions.
README.rst (2 changes: 1 addition & 1 deletion)
@@ -3,7 +3,7 @@ lsbi: Linear Simulation Based Inference
 =======================================
 :lsbi: Linear Simulation Based Inference
 :Author: Will Handley
-:Version: 0.5.0
+:Version: 0.5.1
 :Homepage: https://github.com/handley-lab/lsbi
 :Documentation: http://lsbi.readthedocs.io/

lsbi/_version.py (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
-__version__ = '0.5.0'
+__version__ = '0.5.1'
lsbi/model.py (51 changes: 15 additions & 36 deletions)
@@ -1,6 +1,5 @@
"""Gaussian models for linear Bayesian inference."""
import numpy as np
from functools import cached_property
from scipy.stats import multivariate_normal
from lsbi.stats import mixture_multivariate_normal
from numpy.linalg import solve, inv, slogdet
@@ -14,8 +13,8 @@ def logdet(A):
 class LinearModel(object):
     """A linear model.
-    D|theta ~ N( m + M theta, C)
-    theta ~ N( mu, Sigma)
+    D|theta ~ N( m + M theta, C )
+    theta ~ N( mu, Sigma )
     Parameters: theta (n,)
     Data: D (d,)
@@ -137,9 +136,9 @@ def posterior(self, D):
         ----------
         D : array_like, shape (d,)
         """
-        Sigma = inv(self.invSigma + self.M.T @ self.invC @ self.M)
+        Sigma = inv(inv(self.Sigma) + self.M.T @ inv(self.C) @ self.M)
         D0 = self.m + self.M @ self.mu
-        mu = self.mu + Sigma @ self.M.T @ self.invC @ (D-D0)
+        mu = self.mu + Sigma @ self.M.T @ inv(self.C) @ (D-D0)
         return multivariate_normal(mu, Sigma)

     def evidence(self):
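
The hunk above only inlines the cached inverses; the underlying update is still the standard linear-Gaussian posterior theta|D ~ N( mu + S M'C^{-1}(D - m - M mu), S ) with S = (Sigma^{-1} + M'C^{-1}M)^{-1}. A minimal numpy sketch (not part of the commit; all names illustrative) verifying that this form agrees with direct conditioning of the joint Gaussian:

import numpy as np
from numpy.linalg import inv

rng = np.random.default_rng(0)
n, d = 3, 2
M = rng.normal(size=(d, n))                # linear map, data <- parameters
m = rng.normal(size=d)                     # data offset
mu = rng.normal(size=n)                    # prior mean
A = rng.normal(size=(n, n))
Sigma = A @ A.T + n * np.eye(n)            # prior covariance (SPD)
C = np.eye(d)                              # data covariance
D = rng.normal(size=d)                     # observed data

D0 = m + M @ mu

# posterior as computed in LinearModel.posterior after this commit
S = inv(inv(Sigma) + M.T @ inv(C) @ M)
mu_post = mu + S @ M.T @ inv(C) @ (D - D0)

# same posterior by conditioning the joint Gaussian on D
Sigma_DD = C + M @ Sigma @ M.T             # Cov(D, D)
cross = Sigma @ M.T                        # Cov(theta, D)
assert np.allclose(mu_post, mu + cross @ inv(Sigma_DD) @ (D - D0))
assert np.allclose(S, Sigma - cross @ inv(Sigma_DD) @ cross.T)
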
@@ -189,25 +188,15 @@ def reduce(self, D):
         -------
         ReducedLinearModel
         """
-        Sigma_L = inv(self.M.T @ self.invC @ self.M)
-        mu_L = Sigma_L @ self.M.T @ self.invC @ (D-self.m)
-        logLmax = (- logdet(2 * np.pi * self.C)/2 - (D-self.m) @ self.invC @
-                   (self.C - self.M @ Sigma_L @ self.M.T) @ self.invC @
+        Sigma_L = inv(self.M.T @ inv(self.C) @ self.M)
+        mu_L = Sigma_L @ self.M.T @ inv(self.C) @ (D-self.m)
+        logLmax = (- logdet(2 * np.pi * self.C)/2 - (D-self.m) @ inv(self.C) @
+                   (self.C - self.M @ Sigma_L @ self.M.T) @ inv(self.C) @
                    (D-self.m)/2)
         return ReducedLinearModel(mu_L=mu_L, Sigma_L=Sigma_L, logLmax=logLmax,
                                   mu_pi=self.prior().mean,
                                   Sigma_pi=self.prior().cov)

-    @cached_property
-    def invSigma(self):
-        """Inverse of prior covariance."""
-        return inv(self.Sigma)
-
-    @cached_property
-    def invC(self):
-        """Inverse of data covariance."""
-        return inv(self.C)
-
     def _atleast_2d(self, x):
         if x is None:
             return np.zeros(shape=(0, 0))
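
The commit message does not say why the cached properties were removed, but the behavioural difference is easy to demonstrate: functools.cached_property computes once and never invalidates, so a cached inverse silently goes stale if the underlying attribute is later reassigned, while calling inv() at the point of use always reflects the current matrix. A toy sketch of the pitfall (illustrative class, not from the library):

import numpy as np
from functools import cached_property
from numpy.linalg import inv

class Toy:
    """Illustrative only: mimics the removed invC pattern."""

    def __init__(self, C):
        self.C = C

    @cached_property
    def invC(self):
        """Inverse of data covariance (computed once, then cached)."""
        return inv(self.C)

toy = Toy(np.eye(2))
_ = toy.invC                    # first access populates the cache
toy.C = 2 * np.eye(2)           # reassign the attribute...
print(np.allclose(toy.invC, inv(toy.C)))   # False: the cache is stale

The trade-off is that methods such as posterior and reduce now recompute inv(self.C) on every call.
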
@@ -369,9 +358,9 @@ def DKL(self):
 class LinearMixtureModel(object):
     """A linear mixture model.
-    D|theta, A ~ N( m + M theta, C)
-    theta|A ~ N( mu, Sigma)
-    A ~ categorical(exp(logA))
+    D|theta, A ~ N( m + M theta, C )
+    theta|A ~ N( mu, Sigma )
+    A ~ categorical( exp(logA) )
     Defined by:
         Parameters: theta (n,)
@@ -516,7 +505,7 @@ def prior(self):
     def posterior(self, D):
         """P(theta|D) as a scipy distribution object.
-        theta|D, A ~ N( mu + S M'C^{-1}(D - m - M mu), S)
+        theta|D, A ~ N( mu + S M'C^{-1}(D - m - M mu), S )
         D|A ~ N( m + M mu, C + M Sigma M' )
         A ~ categorical(exp(logA))
         S = (Sigma^{-1} + M'C^{-1}M)^{-1}
@@ -525,11 +514,11 @@ def posterior(self, D):
         ----------
         D : array_like, shape (d,)
         """
-        Sigma = inv(self.invSigma + np.einsum('iaj,iab,ibk->ijk',
-                                              self.M, self.invC, self.M))
+        Sigma = inv(inv(self.Sigma) + np.einsum('iaj,iab,ibk->ijk',
+                                                self.M, inv(self.C), self.M))
         D0 = self.m + np.einsum('ija,ia->ij', self.M, self.mu)
         mu = self.mu + np.einsum('ija,iba,ibc,ic->ij',
-                                 Sigma, self.M, self.invC, D-D0)
+                                 Sigma, self.M, inv(self.C), D-D0)
         evidence = self.evidence()
         logA = (evidence.logpdf(D, reduce=False) + self.logA
                 - evidence.logpdf(D))
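
The einsum strings carry the per-component batching: 'iaj,iab,ibk->ijk' contracts to M_i' C_i^{-1} M_i for each mixture component i. A quick sketch (assumed shapes: M is (k, d, n) and C is (k, d, d), matching the 3-d arrays implied by _atleast_3d below) confirming the contraction against an explicit loop:

import numpy as np
from numpy.linalg import inv

rng = np.random.default_rng(1)
k, d, n = 4, 2, 3                          # components, data dim, param dim
M = rng.normal(size=(k, d, n))
A = rng.normal(size=(k, d, d))
C = A @ A.transpose(0, 2, 1) + d * np.eye(d)   # batch of SPD covariances

batched = np.einsum('iaj,iab,ibk->ijk', M, inv(C), M)   # inv() broadcasts over the batch
looped = np.stack([M[i].T @ inv(C[i]) @ M[i] for i in range(k)])
assert np.allclose(batched, looped)        # shape (k, n, n) either way
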
@@ -562,16 +551,6 @@ def joint(self):
                           [corr.transpose(0, 2, 1), prior.covs]])
         return mixture_multivariate_normal(mu, Sigma, self.logA)

-    @cached_property
-    def invSigma(self):
-        """Inverse of prior covariance."""
-        return inv(self.Sigma)
-
-    @cached_property
-    def invC(self):
-        """Inverse of data covariance."""
-        return inv(self.C)
-
     def _atleast_3d(self, x):
         if x is None:
             return np.zeros(shape=(0, 0, 0))
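
For completeness, a hedged usage sketch of the patched class. The constructor is not shown in this diff; I am assuming LinearModel accepts M, m, C, mu and Sigma as keyword arguments matching the attributes the methods above read:

import numpy as np
from lsbi.model import LinearModel

n, d = 3, 2
model = LinearModel(M=np.ones((d, n)), m=np.zeros(d), C=np.eye(d),
                    mu=np.zeros(n), Sigma=np.eye(n))
post = model.posterior(np.ones(d))   # scipy multivariate_normal, per the docstring
print(post.mean, post.cov)
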
