From 4aeee8f1ceb3caa8e175feaa73268ee087af292b Mon Sep 17 00:00:00 2001
From: Will Handley
Date: Mon, 6 Nov 2023 10:08:21 +0000
Subject: [PATCH] Removing cached properties (#13)

* Removed cached properties

* bump version to 0.5.1
---
 README.rst       |  2 +-
 lsbi/_version.py |  2 +-
 lsbi/model.py    | 51 +++++++++++++++------------------------------------
 3 files changed, 17 insertions(+), 38 deletions(-)

diff --git a/README.rst b/README.rst
index a4c1fc5..2bee394 100644
--- a/README.rst
+++ b/README.rst
@@ -3,7 +3,7 @@ lsbi: Linear Simulation Based Inference
 =======================================
 :lsbi: Linear Simulation Based Inference
 :Author: Will Handley
-:Version: 0.5.0
+:Version: 0.5.1
 :Homepage: https://github.com/handley-lab/lsbi
 :Documentation: http://lsbi.readthedocs.io/
 
diff --git a/lsbi/_version.py b/lsbi/_version.py
index 2b8877c..93b60a1 100644
--- a/lsbi/_version.py
+++ b/lsbi/_version.py
@@ -1 +1 @@
-__version__ = '0.5.0'
+__version__ = '0.5.1'
diff --git a/lsbi/model.py b/lsbi/model.py
index 9dc9819..6ae9aae 100644
--- a/lsbi/model.py
+++ b/lsbi/model.py
@@ -1,6 +1,5 @@
 """Gaussian models for linear Bayesian inference."""
 import numpy as np
-from functools import cached_property
 from scipy.stats import multivariate_normal
 from lsbi.stats import mixture_multivariate_normal
 from numpy.linalg import solve, inv, slogdet
@@ -14,8 +13,8 @@ def logdet(A):
 class LinearModel(object):
     """A linear model.
 
-    D|theta ~ N( m + M theta, C)
-    theta ~ N( mu, Sigma)
+    D|theta ~ N( m + M theta, C )
+    theta ~ N( mu, Sigma )
 
     Parameters: theta (n,)
     Data: D (d,)
@@ -137,9 +136,9 @@ def posterior(self, D):
         ----------
         D : array_like, shape (d,)
         """
-        Sigma = inv(self.invSigma + self.M.T @ self.invC @ self.M)
+        Sigma = inv(inv(self.Sigma) + self.M.T @ inv(self.C) @ self.M)
         D0 = self.m + self.M @ self.mu
-        mu = self.mu + Sigma @ self.M.T @ self.invC @ (D-D0)
+        mu = self.mu + Sigma @ self.M.T @ inv(self.C) @ (D-D0)
         return multivariate_normal(mu, Sigma)
 
     def evidence(self):
@@ -189,25 +188,15 @@ def reduce(self, D):
         -------
         ReducedLinearModel
         """
-        Sigma_L = inv(self.M.T @ self.invC @ self.M)
-        mu_L = Sigma_L @ self.M.T @ self.invC @ (D-self.m)
-        logLmax = (- logdet(2 * np.pi * self.C)/2 - (D-self.m) @ self.invC @
-                   (self.C - self.M @ Sigma_L @ self.M.T) @ self.invC @
+        Sigma_L = inv(self.M.T @ inv(self.C) @ self.M)
+        mu_L = Sigma_L @ self.M.T @ inv(self.C) @ (D-self.m)
+        logLmax = (- logdet(2 * np.pi * self.C)/2 - (D-self.m) @ inv(self.C) @
+                   (self.C - self.M @ Sigma_L @ self.M.T) @ inv(self.C) @
                    (D-self.m)/2)
         return ReducedLinearModel(mu_L=mu_L, Sigma_L=Sigma_L, logLmax=logLmax,
                                   mu_pi=self.prior().mean,
                                   Sigma_pi=self.prior().cov)
 
-    @cached_property
-    def invSigma(self):
-        """Inverse of prior covariance."""
-        return inv(self.Sigma)
-
-    @cached_property
-    def invC(self):
-        """Inverse of data covariance."""
-        return inv(self.C)
-
     def _atleast_2d(self, x):
         if x is None:
             return np.zeros(shape=(0, 0))
@@ -369,9 +358,9 @@ def DKL(self):
 class LinearMixtureModel(object):
     """A linear mixture model.
 
-    D|theta, A ~ N( m + M theta, C)
-    theta|A ~ N( mu, Sigma)
-    A ~ categorical(exp(logA))
+    D|theta, A ~ N( m + M theta, C )
+    theta|A ~ N( mu, Sigma )
+    A ~ categorical( exp(logA) )
 
     Defined by:
         Parameters: theta (n,)
@@ -516,7 +505,7 @@ def prior(self):
     def posterior(self, D):
         """P(theta|D) as a scipy distribution object.
 
-        theta|D, A ~ N( mu + S M'C^{-1}(D - m - M mu), S)
+        theta|D, A ~ N( mu + S M'C^{-1}(D - m - M mu), S )
         D|A ~ N( m + M mu, C + M Sigma M' )
         A ~ categorical(exp(logA))
         S = (Sigma^{-1} + M'C^{-1}M)^{-1}
@@ -525,11 +514,11 @@ def posterior(self, D):
         ----------
         D : array_like, shape (d,)
         """
-        Sigma = inv(self.invSigma + np.einsum('iaj,iab,ibk->ijk',
-                                              self.M, self.invC, self.M))
+        Sigma = inv(inv(self.Sigma) + np.einsum('iaj,iab,ibk->ijk',
+                                                self.M, inv(self.C), self.M))
         D0 = self.m + np.einsum('ija,ia->ij', self.M, self.mu)
         mu = self.mu + np.einsum('ija,iba,ibc,ic->ij',
-                                 Sigma, self.M, self.invC, D-D0)
+                                 Sigma, self.M, inv(self.C), D-D0)
         evidence = self.evidence()
         logA = (evidence.logpdf(D, reduce=False) + self.logA
                 - evidence.logpdf(D))
@@ -562,16 +551,6 @@ def joint(self):
                          [corr.transpose(0, 2, 1), prior.covs]])
         return mixture_multivariate_normal(mu, Sigma, self.logA)
 
-    @cached_property
-    def invSigma(self):
-        """Inverse of prior covariance."""
-        return inv(self.Sigma)
-
-    @cached_property
-    def invC(self):
-        """Inverse of data covariance."""
-        return inv(self.C)
-
     def _atleast_3d(self, x):
         if x is None:
             return np.zeros(shape=(0, 0, 0))
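Reviewer note, not part of the patch: the change is behaviour-preserving. Every self.invSigma / self.invC access is replaced by an explicit inv(self.Sigma) / inv(self.C) call, so the posterior algebra is untouched; the inverses are simply recomputed per call rather than cached on first access. Below is a minimal sketch of the Gaussian posterior update that these hunks implement, with names (n, d, M, m, C, mu, Sigma) following the LinearModel docstring above and the toy sizes and values chosen purely for illustration:

    import numpy as np
    from numpy.linalg import inv
    from scipy.stats import multivariate_normal

    # theta|D ~ N( mu + S M'C^{-1}(D - m - M mu), S )
    # with S = (Sigma^{-1} + M'C^{-1}M)^{-1}, as in LinearModel.posterior.
    rng = np.random.default_rng(0)
    n, d = 3, 5                          # parameter and data dimensions
    M = rng.normal(size=(d, n))          # model matrix
    m = rng.normal(size=d)               # data offset
    C = np.eye(d)                        # data covariance
    mu = np.zeros(n)                     # prior mean
    Sigma = np.eye(n)                    # prior covariance

    D = m + M @ mu + rng.normal(size=d)  # mock data draw

    S = inv(inv(Sigma) + M.T @ inv(C) @ M)
    mu_post = mu + S @ M.T @ inv(C) @ (D - m - M @ mu)
    posterior = multivariate_normal(mu_post, S)

The trade-off is the usual one when dropping functools.cached_property: repeated posterior/reduce calls now redo the O(d^3) and O(n^3) inversions, but a model whose C or Sigma attributes are modified after construction can no longer return stale cached inverses.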