From 283704adbd34b6092b6b710a13c0948980a55100 Mon Sep 17 00:00:00 2001 From: nabr Date: Fri, 26 May 2023 13:42:07 +0200 Subject: [PATCH 1/3] Rename Laplace_diff to LMRF --- README.md | 6 +- cuqi/distribution/__init__.py | 3 +- cuqi/distribution/_laplace_diff.py | 88 --------------- cuqi/distribution/_lmrf.py | 106 +++++++++++++------ cuqi/problem/_problem.py | 18 ++-- cuqi/sampler/_conjugate_approx.py | 8 +- cuqi/sampler/_laplace_approximation.py | 10 +- demos/ExploreTestProblems.ipynb | 6 +- demos/demo00_MinimalExample.py | 6 +- demos/demo04_Deconvolution.ipynb | 4 +- demos/demo15_test_LMRF.py | 2 +- demos/demo23_diagnostics_visual.py | 4 +- demos/demo30_Deconvolution2D.py | 4 +- demos/demo31_callback.py | 4 +- demos/howtos/Deconvolution2D.py | 6 +- demos/try10_Data_Object.py | 2 +- demos/try12_Deconvolution_BayesianProblem.py | 6 +- demos/try14_GMRF.py | 2 +- demos/tutorials/Gibbs.py | 10 +- docs/index.rst | 6 +- tests/test_bayesian_inversion.py | 11 +- tests/test_distribution.py | 8 +- tests/test_sampler.py | 4 +- tests/test_testproblem.py | 2 +- 24 files changed, 138 insertions(+), 188 deletions(-) delete mode 100644 cuqi/distribution/_laplace_diff.py diff --git a/README.md b/README.md index e872a518f..5dfb8fbb3 100644 --- a/README.md +++ b/README.md @@ -28,14 +28,14 @@ import numpy as np import matplotlib.pyplot as plt from cuqi.testproblem import Deconvolution2D from cuqi.data import grains -from cuqi.distribution import Laplace_diff, Gaussian +from cuqi.distribution import LMRF, Gaussian from cuqi.problem import BayesianProblem # Step 1: Model and data, y = Ax A, y_data, info = Deconvolution2D.get_components(dim=128, phantom=grains()) -# Step 2: Prior, x ~ Laplace_diff(0, 0.01) -x = Laplace_diff(location=np.zeros(A.domain_dim), +# Step 2: Prior, x ~ LMRF(0, 0.01) +x = LMRF(location=np.zeros(A.domain_dim), scale=0.01, bc_type='neumann', physical_dim=2) diff --git a/cuqi/distribution/__init__.py b/cuqi/distribution/__init__.py index bdc72a398..6d6a13e45 100644 --- a/cuqi/distribution/__init__.py +++ b/cuqi/distribution/__init__.py @@ -6,9 +6,8 @@ from ._gaussian import Gaussian, JointGaussianSqrtPrec from ._gmrf import GMRF from ._inverse_gamma import InverseGamma -from ._laplace_diff import Laplace_diff -from ._laplace import Laplace from ._lmrf import LMRF +from ._laplace import Laplace from ._lognormal import Lognormal from ._normal import Normal from ._posterior import Posterior diff --git a/cuqi/distribution/_laplace_diff.py b/cuqi/distribution/_laplace_diff.py deleted file mode 100644 index 65ad0fd51..000000000 --- a/cuqi/distribution/_laplace_diff.py +++ /dev/null @@ -1,88 +0,0 @@ -import numpy as np -from cuqi.geometry import _DefaultGeometry, Image2D -from cuqi.operator import FirstOrderFiniteDifference -from cuqi.distribution import Distribution - -class Laplace_diff(Distribution): - """Laplace distribution on the difference between neighboring nodes. - - For 1D `(physical_dim=1)`, the Laplace difference distribution assumes that - - .. math:: - - x_i-x_{i-1} \sim \mathrm{Laplace}(0, b), - - where :math:`b` is the scale parameter. - - For 2D `(physical_dim=2)` the differences are defined in both horizontal and vertical directions. - - It is possible to define boundary conditions using the `bc_type` parameter. - - The location parameter is a shift of the :math:`\mathbf{x}`. - - Parameters - ---------- - location : scalar or ndarray - The location parameter of the distribution. - - scale : scalar - The scale parameter of the distribution. 
- - bc_type : string - The boundary conditions of the difference operator. - - physical_dim : int - The physical dimension of what the distribution represents (can take the values 1 or 2). - - Example - ------- - .. code-block:: python - - import cuqi - import numpy as np - prior = cuqi.distribution.Laplace_diff(location=np.zeros(128), scale=0.1) - - """ - def __init__(self, location, scale, bc_type="zero", physical_dim=1, **kwargs): - # Init from abstract distribution class - super().__init__(**kwargs) - - self.location = location - self.scale = scale - self._bc_type = bc_type - self._physical_dim = physical_dim - - if physical_dim == 2: - N = int(np.sqrt(self.dim)) - num_nodes = (N, N) - if isinstance(self.geometry, _DefaultGeometry): - self.geometry = Image2D(num_nodes) - - elif physical_dim == 1: - num_nodes = self.dim - else: - raise ValueError("Only physical dimension 1 or 2 supported.") - - self._diff_op = FirstOrderFiniteDifference(num_nodes=num_nodes, bc_type=bc_type) - - # Check if location parameter is non-zero vector (not supported) - if callable(location) or np.linalg.norm(location) > 0: - raise ValueError("Non-zero location parameter not supported.") - - def pdf(self, x): - Dx = self._diff_op @ (x-self.location) # np.diff(X) - return (1/(2*self.scale))**(len(Dx)) * np.exp(-np.linalg.norm(Dx, ord=1, axis=0)/self.scale) - - def logpdf(self, x): - Dx = self._diff_op @ (x-self.location) - return len(Dx)*(-(np.log(2)+np.log(self.scale))) - np.linalg.norm(Dx, ord=1, axis=0)/self.scale - - def _sample(self,N=1,rng=None): - raise NotImplementedError("'Laplace_diff.sample' is not implemented. Sampling can be performed with the 'sampler' module.") - - # def cdf(self, x): # TODO - # return 1/2 + 1/2*np.sign(x-self.loc)*(1-np.exp(-np.linalg.norm(x, ord=1, axis=0)/self.scale)) - - # def sample(self): # TODO - # p = np.random.rand(self.dim) - # return self.loc - self.scale*np.sign(p-1/2)*np.log(1-2*abs(p-1/2)) diff --git a/cuqi/distribution/_lmrf.py b/cuqi/distribution/_lmrf.py index f735c4998..df8e26868 100644 --- a/cuqi/distribution/_lmrf.py +++ b/cuqi/distribution/_lmrf.py @@ -1,44 +1,88 @@ -from cuqi.distribution import Distribution -from cuqi.operator import FirstOrderFiniteDifference import numpy as np +from cuqi.geometry import _DefaultGeometry, Image2D +from cuqi.operator import FirstOrderFiniteDifference +from cuqi.distribution import Distribution class LMRF(Distribution): - """ - Parameters - ---------- - partition_size : int - The dimension of the distribution in one physical dimension. + """Laplace distribution on the difference between neighboring nodes. + + For 1D `(physical_dim=1)`, the Laplace difference distribution assumes that + + .. math:: + + x_i-x_{i-1} \sim \mathrm{Laplace}(0, b), + + where :math:`b` is the scale parameter. + + For 2D `(physical_dim=2)` the differences are defined in both horizontal and vertical directions. + + It is possible to define boundary conditions using the `bc_type` parameter. + + The location parameter is a shift of the :math:`\mathbf{x}`. - physical_dim : int - The physical dimension of what the distribution represents (can take the values 1 or 2). + Parameters + ---------- + location : scalar or ndarray + The location parameter of the distribution. + + scale : scalar + The scale parameter of the distribution. + + bc_type : string + The boundary conditions of the difference operator. + + physical_dim : int + The physical dimension of what the distribution represents (can take the values 1 or 2). + + Example + ------- + .. 
code-block:: python + + import cuqi + import numpy as np + prior = cuqi.distribution.LMRF(location=np.zeros(128), scale=0.1) + """ - - def __init__(self, mean, prec, partition_size, physical_dim, bc_type, **kwargs): - super().__init__(**kwargs) - self.mean = mean.reshape(len(mean), 1) - self.prec = prec - self._partition_size = partition_size # partition size - self._bc_type = bc_type # boundary conditions + def __init__(self, location, scale, bc_type="zero", physical_dim=1, **kwargs): + # Init from abstract distribution class + super().__init__(**kwargs) + + self.location = location + self.scale = scale + self._bc_type = bc_type self._physical_dim = physical_dim - if physical_dim == 1: - num_nodes = (partition_size,) + + if physical_dim == 2: + N = int(np.sqrt(self.dim)) + num_nodes = (N, N) + if isinstance(self.geometry, _DefaultGeometry): + self.geometry = Image2D(num_nodes) + + elif physical_dim == 1: + num_nodes = self.dim else: - num_nodes = (partition_size,partition_size) + raise ValueError("Only physical dimension 1 or 2 supported.") + + self._diff_op = FirstOrderFiniteDifference(num_nodes=num_nodes, bc_type=bc_type) - self._diff_op = FirstOrderFiniteDifference( num_nodes, bc_type= bc_type) + # Check if location parameter is non-zero vector (not supported) + if callable(location) or np.linalg.norm(location) > 0: + raise ValueError("Non-zero location parameter not supported.") - @property - def dim(self): - return self._diff_op.dim + def pdf(self, x): + Dx = self._diff_op @ (x-self.location) # np.diff(X) + return (1/(2*self.scale))**(len(Dx)) * np.exp(-np.linalg.norm(Dx, ord=1, axis=0)/self.scale) def logpdf(self, x): + Dx = self._diff_op @ (x-self.location) + return len(Dx)*(-(np.log(2)+np.log(self.scale))) - np.linalg.norm(Dx, ord=1, axis=0)/self.scale - if self._physical_dim == 1 or self._physical_dim == 2: - const = self.dim *(np.log(self.prec)-np.log(2)) - y = const - self.prec*(np.linalg.norm(self._diff_op@x, ord=1)) - else: - raise NotImplementedError - return y + def _sample(self,N=1,rng=None): + raise NotImplementedError("'LMRF.sample' is not implemented. Sampling can be performed with the 'sampler' module.") + + # def cdf(self, x): # TODO + # return 1/2 + 1/2*np.sign(x-self.loc)*(1-np.exp(-np.linalg.norm(x, ord=1, axis=0)/self.scale)) - def _sample(self, N): - raise NotImplementedError \ No newline at end of file + # def sample(self): # TODO + # p = np.random.rand(self.dim) + # return self.loc - self.scale*np.sign(p-1/2)*np.log(1-2*abs(p-1/2)) diff --git a/cuqi/problem/_problem.py b/cuqi/problem/_problem.py index 9a6a8ac7c..d208fe152 100644 --- a/cuqi/problem/_problem.py +++ b/cuqi/problem/_problem.py @@ -5,7 +5,7 @@ import cuqi from cuqi import config -from cuqi.distribution import Distribution, Gaussian, InverseGamma, Laplace_diff, GMRF, Lognormal, Posterior, LMRF, Beta, JointDistribution, Gamma, Cauchy_diff +from cuqi.distribution import Distribution, Gaussian, InverseGamma, LMRF, GMRF, Lognormal, Posterior, Beta, JointDistribution, Gamma, Cauchy_diff from cuqi.density import Density from cuqi.model import LinearModel, Model from cuqi.likelihood import Likelihood @@ -330,8 +330,8 @@ def sample_posterior(self, Ns, callback=None) -> cuqi.samples.Samples: elif hasattr(self.prior,"sqrtprecTimesMean") and hasattr(self.likelihood.distribution,"sqrtprec") and isinstance(self.model,LinearModel): return self._sampleLinearRTO(Ns, callback) - # For Laplace_diff we use our awesome unadjusted Laplace approximation! 
- elif self._check_posterior(self, Laplace_diff, Gaussian): + # For LMRF we use our awesome unadjusted Laplace approximation! + elif self._check_posterior(self, LMRF, Gaussian): return self._sampleUGLA(Ns, callback) # If we have gradients, use NUTS! @@ -343,10 +343,6 @@ def sample_posterior(self, Ns, callback=None) -> cuqi.samples.Samples: elif self._check_posterior(self, (Gaussian, GMRF), Gaussian): return self._samplepCN(Ns, callback) - # For the remainder of valid cases we use CWMH - elif self._check_posterior(self, LMRF): - return self._sampleCWMH(Ns, callback) - else: raise NotImplementedError(f"Automatic sampler choice is not implemented for model: {type(self.model)}, likelihood: {type(self.likelihood.distribution)} and prior: {type(self.prior)} and dim {self.prior.dim}. Manual sampler choice can be done via the 'sampler' module. Posterior distribution can be extracted via '.posterior' of any testproblem (BayesianProblem).") @@ -717,16 +713,16 @@ def _determine_sampling_strategy(self): if self._check_posterior(cond_target, Gamma, (Gaussian, GMRF)): sampling_strategy[par_name] = cuqi.sampler.Conjugate - # Gamma prior, Laplace_diff likelihood -> ConjugateApprox - elif self._check_posterior(cond_target, Gamma, Laplace_diff): + # Gamma prior, LMRF likelihood -> ConjugateApprox + elif self._check_posterior(cond_target, Gamma, LMRF): sampling_strategy[par_name] = cuqi.sampler.ConjugateApprox # Gaussian prior, Gaussian likelihood, Linear model -> Linear_RTO elif self._check_posterior(cond_target, (Gaussian, GMRF), Gaussian, LinearModel): sampling_strategy[par_name] = cuqi.sampler.Linear_RTO - # Laplace_diff prior, Gaussian likelihood, Linear model -> UGLA - elif self._check_posterior(cond_target, Laplace_diff, Gaussian, LinearModel): + # LMRF prior, Gaussian likelihood, Linear model -> UGLA + elif self._check_posterior(cond_target, LMRF, Gaussian, LinearModel): sampling_strategy[par_name] = cuqi.sampler.UGLA else: diff --git a/cuqi/sampler/_conjugate_approx.py b/cuqi/sampler/_conjugate_approx.py index 0175e0cb2..1513e2df4 100644 --- a/cuqi/sampler/_conjugate_approx.py +++ b/cuqi/sampler/_conjugate_approx.py @@ -1,4 +1,4 @@ -from cuqi.distribution import Posterior, Laplace_diff, Gamma +from cuqi.distribution import Posterior, LMRF, Gamma import numpy as np import scipy as sp @@ -9,7 +9,7 @@ class ConjugateApprox: # TODO: Subclass from Sampler once updated by a conjugate pair. Currently supported pairs are: - - (Laplace_diff, Gamma): Approximated by (Gaussian, Gamma) + - (LMRF, Gamma): Approximated by (Gaussian, Gamma) For more information on conjugate pairs, see https://en.wikipedia.org/wiki/Conjugate_prior. @@ -17,7 +17,7 @@ class ConjugateApprox: # TODO: Subclass from Sampler once updated def __init__(self, target: Posterior): - if not isinstance(target.likelihood.distribution, Laplace_diff): + if not isinstance(target.likelihood.distribution, LMRF): raise ValueError("Conjugate sampler only works with Laplace diff likelihood function") if not isinstance(target.prior, Gamma): raise ValueError("Conjugate sampler only works with Gamma prior") @@ -31,7 +31,7 @@ def step(self, x=None): D = self.target.likelihood.distribution._diff_op n = D.shape[0] - # Gaussian approximation of Laplace_diff prior as function of x_k + # Gaussian approximation of LMRF prior as function of x_k # See Uribe et al. (2022) for details # Current has a zero mean assumption on likelihood! 
TODO beta=1e-5 diff --git a/cuqi/sampler/_laplace_approximation.py b/cuqi/sampler/_laplace_approximation.py index e34db33dd..c13a29a65 100644 --- a/cuqi/sampler/_laplace_approximation.py +++ b/cuqi/sampler/_laplace_approximation.py @@ -12,7 +12,7 @@ class UGLA(Sampler): Samples an approximate posterior where the prior is approximated by a Gaussian distribution. The likelihood must be Gaussian. - Currently only works for Laplace_diff priors. + Currently only works for LMRF priors. The inner solver is Conjugate Gradient Least Squares (CGLS) solver. @@ -68,9 +68,9 @@ def __init__(self, target, x0=None, maxit=50, tol=1e-4, beta=1e-5, rng=None, **k if not hasattr(self.target.likelihood.distribution, "sqrtprec"): raise TypeError("Distribution in Likelihood must contain a sqrtprec attribute") - # Check that prior is Laplace_diff - if not isinstance(self.target.prior, cuqi.distribution.Laplace_diff): - raise ValueError('Unadjusted Gaussian Laplace approximation (UGLA) requires Laplace_diff prior') + # Check that prior is LMRF + if not isinstance(self.target.prior, cuqi.distribution.LMRF): + raise ValueError('Unadjusted Gaussian Laplace approximation (UGLA) requires LMRF prior') # Modify initial guess since Sampler sets it to ones. if x0 is not None: @@ -115,7 +115,7 @@ def _sample(self, Ns, Nb): D = self.target.prior._diff_op n = D.shape[0] - # Gaussian approximation of Laplace_diff prior as function of x_k + # Gaussian approximation of LMRF prior as function of x_k def Lk_fun(x_k): dd = 1/np.sqrt((D @ x_k)**2 + self.beta*np.ones(n)) W = sp.sparse.diags(dd) diff --git a/demos/ExploreTestProblems.ipynb b/demos/ExploreTestProblems.ipynb index 9d0b4e6d6..bc3257162 100644 --- a/demos/ExploreTestProblems.ipynb +++ b/demos/ExploreTestProblems.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "from cuqi.distribution import Gaussian, Laplace_diff\n", + "from cuqi.distribution import Gaussian, LMRF\n", "from cuqi.problem import BayesianProblem\n", "from cuqi.testproblem import Deconvolution1D, Deconv_1D, Heat_1D, Poisson_1D, Abel_1D #, Deblur\n", "from cuqi.array import CUQIarray" @@ -321,7 +321,7 @@ "source": [ "It is straightforward to change components of the BayesianProblem. For example if we want to experiment with a different prior we can easily swap it out.\n", "\n", - "We specify a `Laplace_diff` prior, which is a Laplace distribution on differences between neighbouring function values:" + "We specify a `LMRF` prior, which is a Laplace distribution on differences between neighbouring function values:" ] }, { @@ -330,7 +330,7 @@ "metadata": {}, "outputs": [], "source": [ - "prior_lap = Laplace_diff(location=np.zeros(model.domain_dim), scale=0.01, bc_type='zero')" + "prior_lap = LMRF(location=np.zeros(model.domain_dim), scale=0.01, bc_type='zero')" ] }, { diff --git a/demos/demo00_MinimalExample.py b/demos/demo00_MinimalExample.py index 78fbeb691..f1735b818 100644 --- a/demos/demo00_MinimalExample.py +++ b/demos/demo00_MinimalExample.py @@ -4,7 +4,7 @@ import numpy as np import cuqi from cuqi.model import LinearModel -from cuqi.distribution import Gaussian, Laplace_diff, Cauchy_diff, Gamma +from cuqi.distribution import Gaussian, LMRF, Cauchy_diff, Gamma from cuqi.problem import BayesianProblem # %% Minimal example @@ -34,7 +34,7 @@ # switch prior # Set up Bayesian model for inverse problem A = LinearModel(Amat) # y = Ax. 
Model for inverse problem -x = Laplace_diff(np.zeros(n), 0.01, bc_type='zero') # x ~ Laplace_diff(0,0.01), Zero BC +x = LMRF(np.zeros(n), 0.01, bc_type='zero') # x ~ LMRF(0,0.01), Zero BC y = Gaussian(A@x, 0.05) # y ~ N(Ax,0.05) IP = BayesianProblem(y, x).set_data(y=y_data) # Bayesian problem given observed data samples = IP.UQ(exact=x_exact) # Run UQ analysis @@ -74,7 +74,7 @@ A = LinearModel(Amat) # y = Ax. Model for inverse problem d = Gamma(1, 1e-2) # d ~ Gamma(1, 10^-2) l = Gamma(1, 1e-2) # l ~ Gamma(1, 10^-2) -x = Laplace_diff(np.zeros(n), lambda d: 1/d) # x ~ Laplace_diff(0, d^{-1}), Zero BC +x = LMRF(np.zeros(n), lambda d: 1/d) # x ~ LMRF(0, d^{-1}), Zero BC y = Gaussian(A@x, cov=lambda l: 1/l) # y ~ N(Ax, l^-1) IP = BayesianProblem(y, x, d, l).set_data(y=y_data) # Bayesian problem given observed data samples = IP.UQ(Ns = 1000, exact={"x":x_exact, "l":400}) # Run UQ analysis diff --git a/demos/demo04_Deconvolution.ipynb b/demos/demo04_Deconvolution.ipynb index 9284d3b60..298857f04 100644 --- a/demos/demo04_Deconvolution.ipynb +++ b/demos/demo04_Deconvolution.ipynb @@ -47,7 +47,7 @@ "source": [ "from cuqi.testproblem import Deconvolution1D\n", "from cuqi.model import LinearModel\n", - "from cuqi.distribution import Gaussian, Laplace_diff, Cauchy_diff\n", + "from cuqi.distribution import Gaussian, LMRF, Cauchy_diff\n", "from cuqi.sampler import CWMH\n", "from cuqi.problem import BayesianProblem\n", "from cuqi.samples import Samples" @@ -632,7 +632,7 @@ "loc = np.zeros(dim)\n", "delta = 0.5\n", "scale = delta*1/dim\n", - "IP.prior = Laplace_diff(loc, scale, 'zero', name=\"x\")" + "IP.prior = LMRF(loc, scale, 'zero', name=\"x\")" ] }, { diff --git a/demos/demo15_test_LMRF.py b/demos/demo15_test_LMRF.py index 6748cb5f8..201fc0ce4 100644 --- a/demos/demo15_test_LMRF.py +++ b/demos/demo15_test_LMRF.py @@ -12,7 +12,7 @@ prec = 1.0 dom = 1 BCs = 'neumann' -x = cuqi.distribution.LMRF(mean, prec, N, dom, BCs) +x = cuqi.distribution.LMRF(mean, prec, BCs) print(x.logd(np.array([3,4]))) #%% location = 0 diff --git a/demos/demo23_diagnostics_visual.py b/demos/demo23_diagnostics_visual.py index b31a7573f..b9d30143c 100644 --- a/demos/demo23_diagnostics_visual.py +++ b/demos/demo23_diagnostics_visual.py @@ -25,7 +25,7 @@ # %% Try the plotting tools with 4 variable distribution import numpy as np #dist4 = cuqi.distribution.Gaussian(np.array([1,2,3,4]),1) -dist4 = cuqi.distribution.LMRF(np.array([1,2,3,4,5,6]), 1, 6, 1, 'zero') +dist4 = cuqi.distribution.LMRF(np.zeros(6), 1, 'zero') sampler4 = cuqi.sampler.MetropolisHastings(dist4) samples4 = sampler4.sample_adapt(50000) samples4.geometry = cuqi.geometry.Discrete(["a","b","c","d","e","f"]) @@ -45,4 +45,4 @@ # %% ax_4 = samples4_bt.plot_autocorrelation() # %% -ax_22 = samples4_bt.plot_autocorrelation(np.arange(6), grid=(3,2)) \ No newline at end of file +ax_22 = samples4_bt.plot_autocorrelation(np.arange(6), grid=(3,2)) diff --git a/demos/demo30_Deconvolution2D.py b/demos/demo30_Deconvolution2D.py index f5b572f90..fac37885e 100644 --- a/demos/demo30_Deconvolution2D.py +++ b/demos/demo30_Deconvolution2D.py @@ -47,8 +47,8 @@ cov=1 ) -# Laplace_diff prior (promotes piece-wise constant regions) -TP.prior = cuqi.distribution.Laplace_diff( +# LMRF prior (promotes piece-wise constant regions) +TP.prior = cuqi.distribution.LMRF( location=np.zeros(TP.model.domain_dim), scale=0.01, physical_dim=2 # Indicates that the prior should be in 2D. 
diff --git a/demos/demo31_callback.py b/demos/demo31_callback.py index aeac0ef7b..8f30e11b7 100644 --- a/demos/demo31_callback.py +++ b/demos/demo31_callback.py @@ -13,7 +13,7 @@ TP = cuqi.testproblem.Deconvolution2D(phantom=cuqi.data.grains()) # Define prior -TP.prior = cuqi.distribution.Laplace_diff(location=np.zeros(TP.model.domain_dim), +TP.prior = cuqi.distribution.LMRF(location=np.zeros(TP.model.domain_dim), scale=0.01, bc_type="neumann", physical_dim=2, @@ -107,7 +107,7 @@ def callback(sample, sample_index): # Define test problem and prior TP = cuqi.testproblem.Deconvolution1D(phantom="square") # Default values -TP.prior = cuqi.distribution.Laplace_diff(np.zeros(TP.model.domain_dim), 0.01) # Set prior +TP.prior = cuqi.distribution.LMRF(np.zeros(TP.model.domain_dim), 0.01) # Set prior # Create callback function for progress plotting (Burn-in is 20% by default so we allocate 120% of samples) callback = make_callback_function(fig, TP, int(1.2*Ns)) diff --git a/demos/howtos/Deconvolution2D.py b/demos/howtos/Deconvolution2D.py index 8569b03ec..7fba6ec59 100644 --- a/demos/howtos/Deconvolution2D.py +++ b/demos/howtos/Deconvolution2D.py @@ -9,7 +9,7 @@ import numpy as np from cuqi.testproblem import Deconvolution2D -from cuqi.distribution import Gaussian, Laplace_diff +from cuqi.distribution import Gaussian, LMRF from cuqi.problem import BayesianProblem # %% # Step 1: Deterministic model @@ -48,10 +48,10 @@ # where :math:`\delta` is the scale parameter defining how likely jumps from one pixel value # to another are in the horizontal and vertical directions. # -# This distribution comes pre-defined in CUQIpy as the :class:`cuqi.distribution.Laplace_diff`. +# This distribution comes pre-defined in CUQIpy as the :class:`cuqi.distribution.LMRF`. # Notice we have to specify the physical dimensions of the unknown. -x = Laplace_diff(location=np.zeros(A.domain_dim), scale=0.1, physical_dim=2) +x = LMRF(location=np.zeros(A.domain_dim), scale=0.1, physical_dim=2) # %% # Step 3: Likelihood model diff --git a/demos/try10_Data_Object.py b/demos/try10_Data_Object.py index 3567c9cae..f8a51ebbe 100644 --- a/demos/try10_Data_Object.py +++ b/demos/try10_Data_Object.py @@ -14,7 +14,7 @@ import cuqi from cuqi.testproblem import Deconvolution1D from cuqi.model import LinearModel -from cuqi.distribution import Gaussian, Laplace_diff, Cauchy_diff +from cuqi.distribution import Gaussian, LMRF, Cauchy_diff from cuqi.sampler import CWMH from cuqi.problem import BayesianProblem from cuqi.samples import Samples diff --git a/demos/try12_Deconvolution_BayesianProblem.py b/demos/try12_Deconvolution_BayesianProblem.py index 017c18709..6a32aa5d5 100644 --- a/demos/try12_Deconvolution_BayesianProblem.py +++ b/demos/try12_Deconvolution_BayesianProblem.py @@ -6,7 +6,7 @@ import numpy as np import matplotlib.pyplot as plt -from cuqi.distribution import Gaussian, Cauchy_diff, Laplace_diff +from cuqi.distribution import Gaussian, Cauchy_diff, LMRF from cuqi.distribution import GMRF, LMRF, Laplace, Beta, InverseGamma, Lognormal from cuqi.sampler import NUTS, CWMH @@ -24,13 +24,13 @@ if ndim == 1: Ns = 1000 if ndim == 2: Ns = 500 -# %% Prior choices (Main ones of interest: Gaussian, GMR, Cauchy_diff, Laplace_diff: +# %% Prior choices (Main ones of interest: Gaussian, GMR, Cauchy_diff, LMRF: # Working choices #TP.prior = Gaussian(mean=np.zeros(n), cov=par**2, geometry=TP.model.domain_geometry) #TP.prior = GMRF(np.zeros(n), 1/par**2, ndim, "zero", geometry=TP.model.domain_geometry) # Odd behavior (swingy?) 
TP.prior = Cauchy_diff(location=np.zeros(n), scale=0.01, bc_type="zero", physical_dim=ndim, geometry=TP.model.domain_geometry) -#TP.prior = Laplace_diff(location=np.zeros(n), scale=0.01, bc_type="neumann", physical_dim=ndim, geometry=TP.model.domain_geometry) +#TP.prior = LMRF(location=np.zeros(n), scale=0.01, bc_type="neumann", physical_dim=ndim, geometry=TP.model.domain_geometry) # Bad choices (ignore) both 1D and 2D #TP.prior = Beta(2*np.ones(n), 5*np.ones(n)) #Might need tuning diff --git a/demos/try14_GMRF.py b/demos/try14_GMRF.py index e5eede4df..827d154d2 100644 --- a/demos/try14_GMRF.py +++ b/demos/try14_GMRF.py @@ -56,7 +56,7 @@ samples.plot_ci(exact=TP.exactSolution) # %% -TP.prior = cuqi.distribution.Laplace_diff( +TP.prior = cuqi.distribution.LMRF( location=np.zeros(TP.model.domain_dim), scale=0.001, physical_dim=2 diff --git a/demos/tutorials/Gibbs.py b/demos/tutorials/Gibbs.py index 0815e2701..b2f7d4aa0 100644 --- a/demos/tutorials/Gibbs.py +++ b/demos/tutorials/Gibbs.py @@ -34,7 +34,7 @@ import numpy as np import matplotlib.pyplot as plt from cuqi.testproblem import Deconvolution1D -from cuqi.distribution import Gaussian, Gamma, JointDistribution, GMRF, Laplace_diff +from cuqi.distribution import Gaussian, Gamma, JointDistribution, GMRF, LMRF from cuqi.sampler import Gibbs, Linear_RTO, Conjugate, UGLA, ConjugateApprox np.random.seed(0) @@ -204,19 +204,19 @@ # # .. math:: # -# \mathbf{x} \sim \text{Laplace_diff}(\mathbf{0}, d^{-1}), +# \mathbf{x} \sim \text{LMRF}(\mathbf{0}, d^{-1}), # # which means that :math:`x_i-x_{i-1} \sim \mathrm{Laplace}(0, d^{-1})`. # -# This prior is implemented in CUQIpy as the ``Laplace_diff`` distribution. +# This prior is implemented in CUQIpy as the ``LMRF`` distribution. # To update our model we simply need to replace the ``GMRF`` distribution -# with the ``Laplace_diff`` distribution. Note that the Laplace distribution +# with the ``LMRF`` distribution. Note that the Laplace distribution # is defined via a scale parameter, so we invert the parameter :math:`d`. 
# # This laplace distribution and new posterior can be defined as follows: # Define new distribution for x -x = Laplace_diff(np.zeros(n), lambda d: 1/d) +x = LMRF(np.zeros(n), lambda d: 1/d) # Define new joint distribution with piecewise constant prior joint_Ld = JointDistribution(d, l, x, y) diff --git a/docs/index.rst b/docs/index.rst index dbc72593a..520341d9c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -121,14 +121,14 @@ Image deconvolution with uncertainty quantification import matplotlib.pyplot as plt from cuqi.testproblem import Deconvolution2D from cuqi.data import grains - from cuqi.distribution import Laplace_diff, Gaussian + from cuqi.distribution import LMRF, Gaussian from cuqi.problem import BayesianProblem # Step 1: Model and data, y = Ax A, y_data, info = Deconvolution2D.get_components(dim=128, phantom=grains()) - # Step 2: Prior, x ~ Laplace_diff(0, 0.01) - x = Laplace_diff(location=np.zeros(A.domain_dim), + # Step 2: Prior, x ~ LMRF(0, 0.01) + x = LMRF(location=np.zeros(A.domain_dim), scale=0.01, bc_type='neumann', physical_dim=2) diff --git a/tests/test_bayesian_inversion.py b/tests/test_bayesian_inversion.py index fc3db3a3f..7f9fb6349 100644 --- a/tests/test_bayesian_inversion.py +++ b/tests/test_bayesian_inversion.py @@ -5,7 +5,7 @@ import sys from cuqi.testproblem import Deconvolution1D -from cuqi.distribution import Gaussian, GMRF, Cauchy_diff, Laplace_diff, LMRF, Gamma +from cuqi.distribution import Gaussian, GMRF, Cauchy_diff, LMRF, Gamma from cuqi.problem import BayesianProblem from cuqi.density import Density @@ -14,8 +14,7 @@ [ (Deconvolution1D, "gauss", Gaussian(np.zeros(128), 0.071**2), 20), (Deconvolution1D, "gauss", GMRF(np.zeros(128), 100, 1, "zero"), 20), - (Deconvolution1D, "square", LMRF(np.zeros(128), 100, 128, 1, "zero"), 100), - (Deconvolution1D, "square", Laplace_diff(np.zeros(128), 0.005), 100), + (Deconvolution1D, "square", LMRF(np.zeros(128), 0.005), 100), (Deconvolution1D, "square", Cauchy_diff(np.zeros(128), 0.01), 50), ]) def test_TP_BayesianProblem_sample(copy_reference, TP_type, phantom, prior, Ns): @@ -43,7 +42,7 @@ def test_TP_BayesianProblem_sample(copy_reference, TP_type, phantom, prior, Ns): # Load reference file into temp folder and load ref_fname = f"{TP_type.__name__}_{phantom}_{prior.__class__.__name__}_{Ns}" - #if isinstance(prior, Laplace_diff): #Put the case you want to update for here. + #if isinstance(prior, LMRF): #Put the case you want to update for here. # np.savez(ref_fname, median=med_xpos, sigma=sigma_xpos, lo95=lo95, up95=up95) #uncomment to update ref_file = copy_reference(f"data/{ref_fname}.npz") ref = np.load(ref_file) @@ -86,12 +85,12 @@ def test_TP_BayesianProblem_sample(copy_reference, TP_type, phantom, prior, Ns): ], 50 ), - # Case 2: Laplace_diff with Gamma hyperpriors on both noise and prior precision + # Case 2: LMRF with Gamma hyperpriors on both noise and prior precision ( Deconvolution1D, "square", [ - Laplace_diff(np.zeros(128), lambda d: 1/d, name="x"), + LMRF(np.zeros(128), lambda d: 1/d, name="x"), Gamma(1, 1e-4, name="l"), Gamma(1, 1e-4, name="d") ], diff --git a/tests/test_distribution.py b/tests/test_distribution.py index f700318a2..e7a5e6438 100644 --- a/tests/test_distribution.py +++ b/tests/test_distribution.py @@ -629,13 +629,13 @@ def test_Cauchy_diff_should_not_allow_non_zero_location(): with pytest.raises(ValueError): cuqi.distribution.Cauchy_diff(lambda x: x, 1) -def test_Laplace_diff_should_not_allow_non_zero_location(): - """" Laplace_diff should not allow non-zero location. 
""" +def test_LMRF_should_not_allow_non_zero_location(): + """" LMRF should not allow non-zero location. """ with pytest.raises(ValueError): - cuqi.distribution.Laplace_diff(np.ones(5), 1) + cuqi.distribution.LMRF(np.ones(5), 1) with pytest.raises(ValueError): - cuqi.distribution.Laplace_diff(lambda x: x, 1) + cuqi.distribution.LMRF(lambda x: x, 1) def test_Cauchy_against_scipy(): """ Test Cauchy distribution logpdf, cdf, pdf, gradient """ diff --git a/tests/test_sampler.py b/tests/test_sampler.py index ec9fceede..ecb8b10d1 100644 --- a/tests/test_sampler.py +++ b/tests/test_sampler.py @@ -3,7 +3,7 @@ import sys -from cuqi.distribution import Gaussian, Cauchy_diff, Gaussian, Laplace_diff, GMRF, LMRF +from cuqi.distribution import Gaussian, Cauchy_diff, Gaussian, LMRF, GMRF from cuqi.sampler import pCN import pytest @@ -358,7 +358,7 @@ def test_MALA_regression(copy_reference): (Gaussian(np.zeros(128), 0.1), "_sampleNUTS", np.arange(1,12)), # 20% burn-in + initial guess (Gaussian(np.zeros(128), 0.1), "_samplepCN", np.arange(1,12)), # 20% burn-in + initial guess (Gaussian(np.zeros(128), 0.1), "_sampleCWMH", np.arange(1,12)), # 20% burn-in + initial guess - (Laplace_diff(np.zeros(128), 0.1),"_sampleUGLA", np.arange(1,12)), # 20% burn-in + initial guess + (LMRF(np.zeros(128), 0.1),"_sampleUGLA", np.arange(1,12)), # 20% burn-in + initial guess ]) def test_TP_callback(prior, sample_method, expected): """ Test that the callback function is called with the correct sample index by comparing to the expected output. diff --git a/tests/test_testproblem.py b/tests/test_testproblem.py index 24b19d3fe..aa83e1883 100644 --- a/tests/test_testproblem.py +++ b/tests/test_testproblem.py @@ -150,7 +150,7 @@ def test_Abel(): #TODO. Add tests for custom PSF @pytest.mark.parametrize("prior",[ (cuqi.distribution.Gaussian(np.zeros(128**2), 1, name="x")), - #(cuqi.distribution.Laplace_diff(np.zeros(128**2), 1, "zeros")), + #(cuqi.distribution.LMRF(np.zeros(128**2), 1, "zeros")), #(cuqi.distribution.Cauchy_diff(np.zeros(128**2), 1, "zeros")), ]) def test_Deconvolution2D_Sampling_prior(prior): From 23e13497ed69ac59b038739717559f586fd46e73 Mon Sep 17 00:00:00 2001 From: nabr Date: Fri, 26 May 2023 13:50:28 +0200 Subject: [PATCH 2/3] Update name for cauchy_diff to CMRF --- cuqi/distribution/__init__.py | 2 +- cuqi/distribution/{_cauchy_diff.py => _cmrf.py} | 8 ++++---- cuqi/problem/_problem.py | 4 ++-- demos/demo00_MinimalExample.py | 6 +++--- demos/demo03_BayesianInversion.py | 2 +- demos/demo04_Deconvolution.ipynb | 4 ++-- demos/demo12_NUTS.py | 2 +- demos/demo12_gradients.py | 2 +- demos/demo27_ULA.py | 2 +- demos/demo28_MALA.py | 2 +- demos/try03_Cauchy_MCMC.py | 2 +- demos/try10_Data_Object.py | 2 +- demos/try12_Deconvolution_BayesianProblem.py | 6 +++--- ...50.npz => Deconvolution1D_square_CMRF_50.npz} | Bin .../Deconvolution1D_square_Laplace_diff_100.npz | Bin 5084 -> 0 bytes tests/test_bayesian_inversion.py | 8 ++++---- tests/test_distribution.py | 8 ++++---- tests/test_sampler.py | 8 ++++---- tests/test_testproblem.py | 2 +- 19 files changed, 35 insertions(+), 35 deletions(-) rename cuqi/distribution/{_cauchy_diff.py => _cmrf.py} (91%) rename tests/data/{Deconvolution1D_square_Cauchy_diff_50.npz => Deconvolution1D_square_CMRF_50.npz} (100%) delete mode 100644 tests/data/Deconvolution1D_square_Laplace_diff_100.npz diff --git a/cuqi/distribution/__init__.py b/cuqi/distribution/__init__.py index 6d6a13e45..ba33b167d 100644 --- a/cuqi/distribution/__init__.py +++ b/cuqi/distribution/__init__.py @@ -1,7 +1,7 @@ from 
._distribution import Distribution from ._beta import Beta from ._cauchy import Cauchy -from ._cauchy_diff import Cauchy_diff +from ._cmrf import CMRF from ._gamma import Gamma from ._gaussian import Gaussian, JointGaussianSqrtPrec from ._gmrf import GMRF diff --git a/cuqi/distribution/_cauchy_diff.py b/cuqi/distribution/_cmrf.py similarity index 91% rename from cuqi/distribution/_cauchy_diff.py rename to cuqi/distribution/_cmrf.py index 545e8397a..dddbca849 100644 --- a/cuqi/distribution/_cauchy_diff.py +++ b/cuqi/distribution/_cmrf.py @@ -5,7 +5,7 @@ from cuqi.operator import FirstOrderFiniteDifference from cuqi.geometry import _DefaultGeometry, Image2D, _get_identity_geometries -class Cauchy_diff(Distribution): +class CMRF(Distribution): """Cauchy distribution on the difference between neighboring nodes. For 1D `(physical_dim=1)`, the Cauchy difference distribution assumes that @@ -42,7 +42,7 @@ class Cauchy_diff(Distribution): import cuqi import numpy as np - prior = cuqi.distribution.Cauchy_diff(location=np.zeros(128), scale=0.1) + prior = cuqi.distribution.CMRF(location=np.zeros(128), scale=0.1) """ @@ -60,7 +60,7 @@ def __init__(self, location, scale, bc_type="zero", physical_dim=1, **kwargs): num_nodes = (N, N) if isinstance(self.geometry, _DefaultGeometry): self.geometry = Image2D(num_nodes) - print("Warning: 2D Cauchy_diff is still experimental. Use at own risk.") + print("Warning: 2D CMRF is still experimental. Use at own risk.") elif physical_dim == 1: num_nodes = self.dim else: @@ -89,7 +89,7 @@ def _gradient(self, val, **kwargs): warnings.warn('Gradient not implemented for {}'.format(type(self.location))) def _sample(self,N=1,rng=None): - raise NotImplementedError("'Cauchy_diff.sample' is not implemented. Sampling can be performed with the 'sampler' module.") + raise NotImplementedError("'CMRF.sample' is not implemented. 
Sampling can be performed with the 'sampler' module.") # def cdf(self, x): # TODO # return 1/np.pi * np.atan((x-self.loc)/self.scale) diff --git a/cuqi/problem/_problem.py b/cuqi/problem/_problem.py index d208fe152..6812019a1 100644 --- a/cuqi/problem/_problem.py +++ b/cuqi/problem/_problem.py @@ -5,7 +5,7 @@ import cuqi from cuqi import config -from cuqi.distribution import Distribution, Gaussian, InverseGamma, LMRF, GMRF, Lognormal, Posterior, Beta, JointDistribution, Gamma, Cauchy_diff +from cuqi.distribution import Distribution, Gaussian, InverseGamma, LMRF, GMRF, Lognormal, Posterior, Beta, JointDistribution, Gamma, CMRF from cuqi.density import Density from cuqi.model import LinearModel, Model from cuqi.likelihood import Likelihood @@ -577,7 +577,7 @@ def gradfunc(x): return -density.gradient(x) if disp: print("Optimizing with approximate gradients.") # Compute point estimate - if self._check_posterior(self, Cauchy_diff, must_have_gradient=True): # Use L-BFGS-B for Cauchy_diff prior as it has better performance for this multi-modal posterior + if self._check_posterior(self, CMRF, must_have_gradient=True): # Use L-BFGS-B for CMRF prior as it has better performance for this multi-modal posterior if disp: print(f"Using scipy.optimize.L_BFGS_B on negative log of {density.__class__.__name__}") if disp: print("x0: ones vector") solver = cuqi.solver.L_BFGS_B(func, x0, gradfunc=gradfunc) diff --git a/demos/demo00_MinimalExample.py b/demos/demo00_MinimalExample.py index f1735b818..c8f1ece2d 100644 --- a/demos/demo00_MinimalExample.py +++ b/demos/demo00_MinimalExample.py @@ -4,7 +4,7 @@ import numpy as np import cuqi from cuqi.model import LinearModel -from cuqi.distribution import Gaussian, LMRF, Cauchy_diff, Gamma +from cuqi.distribution import Gaussian, LMRF, CMRF, Gamma from cuqi.problem import BayesianProblem # %% Minimal example @@ -43,7 +43,7 @@ # switch prior again # Set up Bayesian model for inverse problem A = LinearModel(Amat) # y = Ax. Model for inverse problem -x = Cauchy_diff(np.zeros(n), 0.01, bc_type='zero') # x ~ Cauchy_diff(0,0.01), Zero BC +x = CMRF(np.zeros(n), 0.01, bc_type='zero') # x ~ CMRF(0,0.01), Zero BC y = Gaussian(A@x, 0.05) # y ~ N(Ax,0.05) IP = BayesianProblem(y, x).set_data(y=y_data) # Bayesian problem given observed data samples = IP.UQ(exact=x_exact) # Run UQ analysis @@ -84,7 +84,7 @@ A = LinearModel(Amat) # y = Ax. 
Model for inverse problem d = Gamma(1, 1e-2) # d ~ Gamma(1, 10^-2) l = Gamma(1, 1e-2) # l ~ Gamma(1, 10^-2) - x = Cauchy_diff(np.zeros(n), lambda d: 1/d) # x ~ Cauchy_diff(0, d^{-1}), Zero BC + x = CMRF(np.zeros(n), lambda d: 1/d) # x ~ CMRF(0, d^{-1}), Zero BC y = Gaussian(A@x, cov=lambda l: 1/l) # y ~ N(Ax, l^-1) IP = BayesianProblem(y, x, d, l).set_data(y=y_data) # Bayesian problem given observed data samples = IP.UQ(Ns = 1000, exact={"x":x_exact, "l":400}) # Run UQ analysis diff --git a/demos/demo03_BayesianInversion.py b/demos/demo03_BayesianInversion.py index 990ed9b9b..1a1267cf7 100644 --- a/demos/demo03_BayesianInversion.py +++ b/demos/demo03_BayesianInversion.py @@ -35,7 +35,7 @@ loc = np.zeros(n) delta = 1 scale = delta*h -P2 = cuqi.distribution.Cauchy_diff(loc, scale, 'neumann', name="x") +P2 = cuqi.distribution.CMRF(loc, scale, 'neumann', name="x") #%% Generate and display some prior samples sp1 = P1.sample(5) diff --git a/demos/demo04_Deconvolution.ipynb b/demos/demo04_Deconvolution.ipynb index 298857f04..914cc9a22 100644 --- a/demos/demo04_Deconvolution.ipynb +++ b/demos/demo04_Deconvolution.ipynb @@ -47,7 +47,7 @@ "source": [ "from cuqi.testproblem import Deconvolution1D\n", "from cuqi.model import LinearModel\n", - "from cuqi.distribution import Gaussian, LMRF, Cauchy_diff\n", + "from cuqi.distribution import Gaussian, LMRF, CMRF\n", "from cuqi.sampler import CWMH\n", "from cuqi.problem import BayesianProblem\n", "from cuqi.samples import Samples" @@ -541,7 +541,7 @@ "outputs": [], "source": [ "scale = 2/dim\n", - "IP.prior = Cauchy_diff(np.zeros(dim), scale, 'neumann', name=\"x\")" + "IP.prior = CMRF(np.zeros(dim), scale, 'neumann', name=\"x\")" ] }, { diff --git a/demos/demo12_NUTS.py b/demos/demo12_NUTS.py index 524ca6538..cb3fdedec 100644 --- a/demos/demo12_NUTS.py +++ b/demos/demo12_NUTS.py @@ -20,7 +20,7 @@ loc = np.zeros(n) delta = 1 scale = delta*h -prior = cuqi.distribution.Cauchy_diff(loc, scale, 'neumann') +prior = cuqi.distribution.CMRF(loc, scale, 'neumann') # %% x0 = cuqi.distribution.Gaussian(np.zeros(n),1).sample() diff --git a/demos/demo12_gradients.py b/demos/demo12_gradients.py index ae1e3b4c3..afb9e74c4 100644 --- a/demos/demo12_gradients.py +++ b/demos/demo12_gradients.py @@ -21,7 +21,7 @@ elif (pr == 'cauchy'): h = 1/n delta = 0.3 - prior = cuqi.distribution.Cauchy_diff(np.zeros(n), delta*h, 'neumann') + prior = cuqi.distribution.CMRF(np.zeros(n), delta*h, 'neumann') # %% # Compare with MAP computed using BayesianProblem (within the testproblem) diff --git a/demos/demo27_ULA.py b/demos/demo27_ULA.py index 951e12409..881690286 100644 --- a/demos/demo27_ULA.py +++ b/demos/demo27_ULA.py @@ -20,7 +20,7 @@ loc = np.zeros(n) delta = 1 scale = delta*h -prior = cuqi.distribution.Cauchy_diff(loc, scale, 'neumann') +prior = cuqi.distribution.CMRF(loc, scale, 'neumann') # %% Create the posterior and the sampler posterior = cuqi.distribution.Posterior(likelihood, prior) diff --git a/demos/demo28_MALA.py b/demos/demo28_MALA.py index 13e0d2c2d..d3a9051fb 100644 --- a/demos/demo28_MALA.py +++ b/demos/demo28_MALA.py @@ -17,7 +17,7 @@ # Define Prior loc = np.zeros(n) -prior = cuqi.distribution.Cauchy_diff(loc, .2, 'neumann') +prior = cuqi.distribution.CMRF(loc, .2, 'neumann') # %% Create the posterior and the sampler posterior = cuqi.distribution.Posterior(likelihood, prior) diff --git a/demos/try03_Cauchy_MCMC.py b/demos/try03_Cauchy_MCMC.py index e7dd8774e..52e072e02 100644 --- a/demos/try03_Cauchy_MCMC.py +++ b/demos/try03_Cauchy_MCMC.py @@ -39,7 +39,7 @@ def 
likelihood_logpdf(x): return test.likelihood.logd(x) loc = np.zeros(n) delta = 1 scale = delta*h -prior = cuqi.distribution.Cauchy_diff(loc, scale, 'neumann') +prior = cuqi.distribution.CMRF(loc, scale, 'neumann') def prior_logpdf(x): return prior.logd(x) # ============================================================================= diff --git a/demos/try10_Data_Object.py b/demos/try10_Data_Object.py index f8a51ebbe..5485fd156 100644 --- a/demos/try10_Data_Object.py +++ b/demos/try10_Data_Object.py @@ -14,7 +14,7 @@ import cuqi from cuqi.testproblem import Deconvolution1D from cuqi.model import LinearModel -from cuqi.distribution import Gaussian, LMRF, Cauchy_diff +from cuqi.distribution import Gaussian, LMRF, CMRF from cuqi.sampler import CWMH from cuqi.problem import BayesianProblem from cuqi.samples import Samples diff --git a/demos/try12_Deconvolution_BayesianProblem.py b/demos/try12_Deconvolution_BayesianProblem.py index 6a32aa5d5..d13056fdd 100644 --- a/demos/try12_Deconvolution_BayesianProblem.py +++ b/demos/try12_Deconvolution_BayesianProblem.py @@ -6,7 +6,7 @@ import numpy as np import matplotlib.pyplot as plt -from cuqi.distribution import Gaussian, Cauchy_diff, LMRF +from cuqi.distribution import Gaussian, CMRF, LMRF from cuqi.distribution import GMRF, LMRF, Laplace, Beta, InverseGamma, Lognormal from cuqi.sampler import NUTS, CWMH @@ -24,12 +24,12 @@ if ndim == 1: Ns = 1000 if ndim == 2: Ns = 500 -# %% Prior choices (Main ones of interest: Gaussian, GMR, Cauchy_diff, LMRF: +# %% Prior choices (Main ones of interest: Gaussian, GMR, CMRF, LMRF: # Working choices #TP.prior = Gaussian(mean=np.zeros(n), cov=par**2, geometry=TP.model.domain_geometry) #TP.prior = GMRF(np.zeros(n), 1/par**2, ndim, "zero", geometry=TP.model.domain_geometry) # Odd behavior (swingy?) 
-TP.prior = Cauchy_diff(location=np.zeros(n), scale=0.01, bc_type="zero", physical_dim=ndim, geometry=TP.model.domain_geometry) +TP.prior = CMRF(location=np.zeros(n), scale=0.01, bc_type="zero", physical_dim=ndim, geometry=TP.model.domain_geometry) #TP.prior = LMRF(location=np.zeros(n), scale=0.01, bc_type="neumann", physical_dim=ndim, geometry=TP.model.domain_geometry) # Bad choices (ignore) both 1D and 2D diff --git a/tests/data/Deconvolution1D_square_Cauchy_diff_50.npz b/tests/data/Deconvolution1D_square_CMRF_50.npz similarity index 100% rename from tests/data/Deconvolution1D_square_Cauchy_diff_50.npz rename to tests/data/Deconvolution1D_square_CMRF_50.npz diff --git a/tests/data/Deconvolution1D_square_Laplace_diff_100.npz b/tests/data/Deconvolution1D_square_Laplace_diff_100.npz deleted file mode 100644 index 7a8f4c97437e1f0971cb4ff4efc5e205e23a23a4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5084 zcmd6rc|4U{+s6?Z6B0s(N=k;(L?0eXsBOUCY*L1-~@UzeAPh zY3sv%Bz_*AUrmHZnkUG|JHRtoJ2;fU%OmkW*ZcS*w?|C~yFXdd!{9YBr>L0VVE?JLwQ(3wrV`;#6Uq!=8Ej zKz*bpa7g%gsx2}cyuxpgc?i+=PfOk|HbV=#t&=;A`Jo#v?FFIxiNa{g6{gdbrIwxiLA-nwE$yf; zYzvbM%;EJ1jW?+s$@4*w*L#d1_aYFAua+r>XJDbPdJP}X%MdsuTf$jS_lEwLNrl~e zy_Gn9X9*Sh(Ti;&u$U-A)w8LZY|A;IAmszgGQ$y0|z2+ z+kNSE<-q3&j}8%k4j5ox8mQwqpx8c5u*GtqGK;MAB!~m^(^Elaz8s+33d;{Y4O zzfWJE1FK!wa$0H}m=Fx4U9JBB%*(MWjb6@xy_6x{cd#E`%O+TNAMAx;AK&LD=o;** z9E-B@E&vH$CtqQDJ}_UcFeJao0NsR%Q%-es*rK9o+$V{HGbd*=^*vo6cRA$x#@-#s zx$W{tsTaXW!f37jTYfC+9x4!vIuwr5nofOjFNs8~w>>tUWdsy?QhnG^Bmzlmp=GO5 zEMisIR`4rCAY#a^b^5JgNM=26kxQ%(V%%JlQ20F%9Z9D9uW>d;MAx@kl!L11tg_Ud ziV7{HVHvfPiP-|bJ)71DI%x;vl;GR7b|+v%+eq*TEdaW|RWgJv0^plPeNQhl82FV^ zA{0)X0y*}??t}xr@YUhky|xD~a0Pb6uiEbe*1x%LKg{w5ek7O|((4IwgW1oWjJ-fH ztFqjn-3dIcgzs&3S4CdA`&{R|_CuGOiiFCQ9>a_)#ahEx?a>dJV`CwfF6cnX4&f2( z@2J~Nak4bp49Ol`uhHJW157zHD3@vu1+;v|+>XP@Qo>_+^<#5Hy7S#PbKVX$Zwpvd zb~8a|-wex+_U=b&E>7P(3Hor4E%0Igc^lYmO0^BjmxYJyA6iErh$7p+u%KO|s9)-jDyNL-o#Bewk;_Anz6AmR+E|JakIQXr5=17c3 zINYv^pKM=`2RX$)WpYadth_lZE2kd`Hh}0QX*dP*#{lgYPXeE_XBrvPQE`05uv`>if+RTF zqIkG55VX+7)xQ$h@1ye_4-}2H01A<1a&r?W&NY+I1~DUh55Pd~I`@(4*_n*Q24;71yq4I0#ZLCk=gvCG~8 z6PICwURu?7%Wata{Lu2ddLvYHEw+4fs{*2|pEaAW0v7!<f}zcr4Ou!dpvNf}EIu3!i%sh$Q#M6|@K6KkWib&ZeMDc|vLc}nkC+>fKN}%JkoVKd)?hp!*ONUtU3kb!ta*GfGZG%1pj!SG zMuf_)j;%A{1UNWwTysNPG&E6EM0cj)AxNvv+-w35nQl0}cSrEBP0Ha_33naD+4;^* z#zuh-V+Bdi5DymuyIka(BEd>RwbnT}0u)vIKRA2{hbJX<8-@3Vz+chD8E-H854wc7 z=n4$c-~CS|SEQEjx8}nMDA082PH0O4syyMKCRvk!n!CQ;JC;pBM2gH!AyOhTS@EON zdMOd<(Iy~+k%U@|6SeC&RP^kqms?gi8HsijU zv~d#A=pNINeIF@k^nG^g9}84O`WjmNUYjl@p?RJsC)X3X?q$? 
zZ%pGSc#u$@xti5+nbXL4R)KfuO$?&w)<~NLkWi5Ro2~qAB-BwCeU9T8i^6wOvI-8w zpiK&^I*)uIBRjeh1->rmok16* zW1GnnIY{hYf!Rb+Haa_}-S}Wv4#GOz`uSuF8)=v1uT^i)M7vlmWy@_0R9UlN5`4!G z?QpqYyms$-xY=cId?~jJ;xNzg8{=O>lVY50`};94dmt>$shofh^~ox|%vt#2nAvo$ z@B?ISUacJP<^w$IcE49H#(^WXMa}FD9ANNWuoze6KwrTmzq>XE>eZbpiq>-=HLm=y zB-gZ^)jX5jEWtHl0#_t26ny}Rdb!C;%m>(`zVMRNKLf9i)$rh7P62U0jMqMT0kL5$ zq1$CWFfKVgP{Jldr*g>S%G(j}u)*c>bdCXPBVUUD+-eKVV?o!Sieb^9=o}k;q@(9* zI`{WkrXcN#JXXV-G-TevmbKtdL!+(l?&3z%QQn&I%-ysM^%(R74igbUOtrl37<7sIp-Qqlg@iECZ0X{e`qvan1t z9bGQUGAm*-(NRk8*BU1}l1aCbHpj-JpmSM*UxMP0Ny|}di5v=Y+q;7FbsZJqIvmdE z=#mk!VTZNp0tJa&lrxtzq9Qd}^RGY0>8N9;%|^jU3KIHws1aU}(cX)5qK926h$)Sp zSO(M4YG$&7T{RhP?mVlND^Ew$b#+ya&nRdi)ZNdshl)<9mF>B3jD`-#eYsvUMnl^^ z{WOcUq9K}$;q#tjEHrH}e)fnf1AV8HY)H46=%Fe7?(8b=`nmGx?8h4vq-dM#+mS#< zcXXF!O>1b#B%N(56Tv_isRPsONjiG`S#kK(!DM7Fw(_QRDh<(vyf!l}DX6;UeJZ7k z^f%KLBlB(X&*X{-{pXTP@3o4OP;vs)&@SJR|3(AkcXh-oi2+hcH)&kw6L+ums>FMloR0;+>1~+E_IT(}7c-2_O$2`jMs9UH2^`0cPRH*e zfl*38*1?tW5Z#27~cMHE;tO{_{n?r?8QO>Oxz#bi3i7!d%J!d#lvGK(TvZJ!l3^ALV*x31u$-! zFWD0mcr1W9AtS?rNdcO)x-1)R51o(b(K!bL3ETZFjV=P)bM(#AgkrFl`=EIUR{`ap z*ym1JKLG79{K+fs!;r=ow(7R?7<44@w4B%BlILK-$*bIQ&=>PbEj z#@g}=QF$DQ8c^Jgxcv_cKPJA61Nr%t@y+!dh&VN4^|pxv1@9C~ls}9l zr}A-$k_Sy-X)EPEcc}&>mNPdhT9kuSq2<)8#sVl@v8I|SnFne&2kw;I$^v(R!;eGk z(t*RS7Mbcz0&P{Dl-+(*sE>H=5>6n(gL=oIfmI1$hLgI!@>(o>7AglqNF?l%z@7^% zAi>*#ebqDsD0HrxL8RC5%s0!$opg<#2a7y zxSI?KxU!HJFKKYH{c#ahfUx2uUjYgSh%%t?gy!R5I1wKTZ1 zYNlh5mk#nndI_EH=}_b&l22YDgAy-Zc&8BsO6{B{rQ6~`)2p~E!z+&Kxp?`cx$f%U z@%O{7|4nl{67R43|2z8nyY8=r=AU)-+|u*EW#`{ze>Hi3mf?kY{%iUEuKFur|E$^} a&cpK`+}c_Rto${{&%OG&{gl+N(Z2w_)`0i` diff --git a/tests/test_bayesian_inversion.py b/tests/test_bayesian_inversion.py index 7f9fb6349..86a515473 100644 --- a/tests/test_bayesian_inversion.py +++ b/tests/test_bayesian_inversion.py @@ -5,7 +5,7 @@ import sys from cuqi.testproblem import Deconvolution1D -from cuqi.distribution import Gaussian, GMRF, Cauchy_diff, LMRF, Gamma +from cuqi.distribution import Gaussian, GMRF, CMRF, LMRF, Gamma from cuqi.problem import BayesianProblem from cuqi.density import Density @@ -15,12 +15,12 @@ (Deconvolution1D, "gauss", Gaussian(np.zeros(128), 0.071**2), 20), (Deconvolution1D, "gauss", GMRF(np.zeros(128), 100, 1, "zero"), 20), (Deconvolution1D, "square", LMRF(np.zeros(128), 0.005), 100), - (Deconvolution1D, "square", Cauchy_diff(np.zeros(128), 0.01), 50), + (Deconvolution1D, "square", CMRF(np.zeros(128), 0.01), 50), ]) def test_TP_BayesianProblem_sample(copy_reference, TP_type, phantom, prior, Ns): # SKIP NUTS test if not windows (for now) - if isinstance(prior, Cauchy_diff) and not sys.platform.startswith('win'): - pytest.skip("NUTS(Cauchy_diff) regression test is not implemented for this platform") + if isinstance(prior, CMRF) and not sys.platform.startswith('win'): + pytest.skip("NUTS(CMRF) regression test is not implemented for this platform") np.random.seed(19937) diff --git a/tests/test_distribution.py b/tests/test_distribution.py index e7a5e6438..97d374b40 100644 --- a/tests/test_distribution.py +++ b/tests/test_distribution.py @@ -621,13 +621,13 @@ def test_enable_FD_gradient_posterior(x, y, x_i): assert np.allclose(g_exact, g_FD) and np.all(g_exact != g_FD)\ or (np.all(np.isnan(g_exact)) and np.all(np.isnan(g_FD))) -def test_Cauchy_diff_should_not_allow_non_zero_location(): - """" Cauchy_diff should not allow non-zero location. """ +def test_CMRF_should_not_allow_non_zero_location(): + """" CMRF should not allow non-zero location. 
""" with pytest.raises(ValueError): - cuqi.distribution.Cauchy_diff(np.ones(5), 1) + cuqi.distribution.CMRF(np.ones(5), 1) with pytest.raises(ValueError): - cuqi.distribution.Cauchy_diff(lambda x: x, 1) + cuqi.distribution.CMRF(lambda x: x, 1) def test_LMRF_should_not_allow_non_zero_location(): """" LMRF should not allow non-zero location. """ diff --git a/tests/test_sampler.py b/tests/test_sampler.py index ecb8b10d1..1147cd374 100644 --- a/tests/test_sampler.py +++ b/tests/test_sampler.py @@ -3,7 +3,7 @@ import sys -from cuqi.distribution import Gaussian, Cauchy_diff, Gaussian, LMRF, GMRF +from cuqi.distribution import Gaussian, CMRF, Gaussian, LMRF, GMRF from cuqi.sampler import pCN import pytest @@ -16,7 +16,7 @@ def test_CWMH_modify_proposal(): x0 = 0.5*np.ones(n) # Set up target - target_dist = cuqi.distribution.Cauchy_diff(np.zeros(n), 0.5, 'neumann') + target_dist = cuqi.distribution.CMRF(np.zeros(n), 0.5, 'neumann') def target(x): return target_dist.pdf(x) # Set up proposals @@ -140,7 +140,7 @@ def test_sampler_geometry_assignment(): x0 = 0.5*np.ones(n) # Set up target - target = cuqi.distribution.Cauchy_diff(np.zeros(n), 0.5, 'neumann') + target = cuqi.distribution.CMRF(np.zeros(n), 0.5, 'neumann') target.geometry = cuqi.geometry.Continuous2D((1,2)) # Set up proposals @@ -279,7 +279,7 @@ def test_ULA_regression(copy_reference): loc = np.zeros(n) delta = 1 scale = delta*h - prior = cuqi.distribution.Cauchy_diff(loc, scale, 'neumann') + prior = cuqi.distribution.CMRF(loc, scale, 'neumann') # %% Create the posterior and the sampler posterior = cuqi.distribution.Posterior(likelihood, prior) diff --git a/tests/test_testproblem.py b/tests/test_testproblem.py index aa83e1883..2caace06d 100644 --- a/tests/test_testproblem.py +++ b/tests/test_testproblem.py @@ -151,7 +151,7 @@ def test_Abel(): @pytest.mark.parametrize("prior",[ (cuqi.distribution.Gaussian(np.zeros(128**2), 1, name="x")), #(cuqi.distribution.LMRF(np.zeros(128**2), 1, "zeros")), - #(cuqi.distribution.Cauchy_diff(np.zeros(128**2), 1, "zeros")), + #(cuqi.distribution.CMRF(np.zeros(128**2), 1, "zeros")), ]) def test_Deconvolution2D_Sampling_prior(prior): tp = cuqi.testproblem.Deconvolution2D(prior=prior) From 3c6e9b4a81531fe3954a1ee855590fd4b4f5a934 Mon Sep 17 00:00:00 2001 From: nabr Date: Fri, 26 May 2023 13:57:43 +0200 Subject: [PATCH 3/3] Replace reg data for LMRF --- .../data/Deconvolution1D_square_LMRF_100.npz | Bin 5084 -> 5084 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/tests/data/Deconvolution1D_square_LMRF_100.npz b/tests/data/Deconvolution1D_square_LMRF_100.npz index 2aa393c2b000a3947a19df098649fe111b175202..7a8f4c97437e1f0971cb4ff4efc5e205e23a23a4 100644 GIT binary patch literal 5084 zcmd6rc|4U{+s6?Z6B0s(N=k;(L?0eXsBOUCY*L1-~@UzeAPh zY3sv%Bz_*AUrmHZnkUG|JHRtoJ2;fU%OmkW*ZcS*w?|C~yFXdd!{9YBr>L0VVE?JLwQ(3wrV`;#6Uq!=8Ej zKz*bpa7g%gsx2}cyuxpgc?i+=PfOk|HbV=#t&=;A`Jo#v?FFIxiNa{g6{gdbrIwxiLA-nwE$yf; zYzvbM%;EJ1jW?+s$@4*w*L#d1_aYFAua+r>XJDbPdJP}X%MdsuTf$jS_lEwLNrl~e zy_Gn9X9*Sh(Ti;&u$U-A)w8LZY|A;IAmszgGQ$y0|z2+ z+kNSE<-q3&j}8%k4j5ox8mQwqpx8c5u*GtqGK;MAB!~m^(^Elaz8s+33d;{Y4O zzfWJE1FK!wa$0H}m=Fx4U9JBB%*(MWjb6@xy_6x{cd#E`%O+TNAMAx;AK&LD=o;** z9E-B@E&vH$CtqQDJ}_UcFeJao0NsR%Q%-es*rK9o+$V{HGbd*=^*vo6cRA$x#@-#s zx$W{tsTaXW!f37jTYfC+9x4!vIuwr5nofOjFNs8~w>>tUWdsy?QhnG^Bmzlmp=GO5 zEMisIR`4rCAY#a^b^5JgNM=26kxQ%(V%%JlQ20F%9Z9D9uW>d;MAx@kl!L11tg_Ud ziV7{HVHvfPiP-|bJ)71DI%x;vl;GR7b|+v%+eq*TEdaW|RWgJv0^plPeNQhl82FV^ zA{0)X0y*}??t}xr@YUhky|xD~a0Pb6uiEbe*1x%LKg{w5ek7O|((4IwgW1oWjJ-fH 
ztFqjn-3dIcgzs&3S4CdA`&{R|_CuGOiiFCQ9>a_)#ahEx?a>dJV`CwfF6cnX4&f2( z@2J~Nak4bp49Ol`uhHJW157zHD3@vu1+;v|+>XP@Qo>_+^<#5Hy7S#PbKVX$Zwpvd zb~8a|-wex+_U=b&E>7P(3Hor4E%0Igc^lYmO0^BjmxYJyA6iErh$7p+u%KO|s9)-jDyNL-o#Bewk;_Anz6AmR+E|JakIQXr5=17c3 zINYv^pKM=`2RX$)WpYadth_lZE2kd`Hh}0QX*dP*#{lgYPXeE_XBrvPQE`05uv`>if+RTF zqIkG55VX+7)xQ$h@1ye_4-}2H01A<1a&r?W&NY+I1~DUh55Pd~I`@(4*_n*Q24;71yq4I0#ZLCk=gvCG~8 z6PICwURu?7%Wata{Lu2ddLvYHEw+4fs{*2|pEaAW0v7!<f}zcr4Ou!dpvNf}EIu3!i%sh$Q#M6|@K6KkWib&ZeMDc|vLc}nkC+>fKN}%JkoVKd)?hp!*ONUtU3kb!ta*GfGZG%1pj!SG zMuf_)j;%A{1UNWwTysNPG&E6EM0cj)AxNvv+-w35nQl0}cSrEBP0Ha_33naD+4;^* z#zuh-V+Bdi5DymuyIka(BEd>RwbnT}0u)vIKRA2{hbJX<8-@3Vz+chD8E-H854wc7 z=n4$c-~CS|SEQEjx8}nMDA082PH0O4syyMKCRvk!n!CQ;JC;pBM2gH!AyOhTS@EON zdMOd<(Iy~+k%U@|6SeC&RP^kqms?gi8HsijU zv~d#A=pNINeIF@k^nG^g9}84O`WjmNUYjl@p?RJsC)X3X?q$? zZ%pGSc#u$@xti5+nbXL4R)KfuO$?&w)<~NLkWi5Ro2~qAB-BwCeU9T8i^6wOvI-8w zpiK&^I*)uIBRjeh1->rmok16* zW1GnnIY{hYf!Rb+Haa_}-S}Wv4#GOz`uSuF8)=v1uT^i)M7vlmWy@_0R9UlN5`4!G z?QpqYyms$-xY=cId?~jJ;xNzg8{=O>lVY50`};94dmt>$shofh^~ox|%vt#2nAvo$ z@B?ISUacJP<^w$IcE49H#(^WXMa}FD9ANNWuoze6KwrTmzq>XE>eZbpiq>-=HLm=y zB-gZ^)jX5jEWtHl0#_t26ny}Rdb!C;%m>(`zVMRNKLf9i)$rh7P62U0jMqMT0kL5$ zq1$CWFfKVgP{Jldr*g>S%G(j}u)*c>bdCXPBVUUD+-eKVV?o!Sieb^9=o}k;q@(9* zI`{WkrXcN#JXXV-G-TevmbKtdL!+(l?&3z%QQn&I%-ysM^%(R74igbUOtrl37<7sIp-Qqlg@iECZ0X{e`qvan1t z9bGQUGAm*-(NRk8*BU1}l1aCbHpj-JpmSM*UxMP0Ny|}di5v=Y+q;7FbsZJqIvmdE z=#mk!VTZNp0tJa&lrxtzq9Qd}^RGY0>8N9;%|^jU3KIHws1aU}(cX)5qK926h$)Sp zSO(M4YG$&7T{RhP?mVlND^Ew$b#+ya&nRdi)ZNdshl)<9mF>B3jD`-#eYsvUMnl^^ z{WOcUq9K}$;q#tjEHrH}e)fnf1AV8HY)H46=%Fe7?(8b=`nmGx?8h4vq-dM#+mS#< zcXXF!O>1b#B%N(56Tv_isRPsONjiG`S#kK(!DM7Fw(_QRDh<(vyf!l}DX6;UeJZ7k z^f%KLBlB(X&*X{-{pXTP@3o4OP;vs)&@SJR|3(AkcXh-oi2+hcH)&kw6L+ums>FMloR0;+>1~+E_IT(}7c-2_O$2`jMs9UH2^`0cPRH*e zfl*38*1?tW5Z#27~cMHE;tO{_{n?r?8QO>Oxz#bi3i7!d%J!d#lvGK(TvZJ!l3^ALV*x31u$-! zFWD0mcr1W9AtS?rNdcO)x-1)R51o(b(K!bL3ETZFjV=P)bM(#AgkrFl`=EIUR{`ap z*ym1JKLG79{K+fs!;r=ow(7R?7<44@w4B%BlILK-$*bIQ&=>PbEj z#@g}=QF$DQ8c^Jgxcv_cKPJA61Nr%t@y+!dh&VN4^|pxv1@9C~ls}9l zr}A-$k_Sy-X)EPEcc}&>mNPdhT9kuSq2<)8#sVl@v8I|SnFne&2kw;I$^v(R!;eGk z(t*RS7Mbcz0&P{Dl-+(*sE>H=5>6n(gL=oIfmI1$hLgI!@>(o>7AglqNF?l%z@7^% zAi>*#ebqDsD0HrxL8RC5%s0!$opg<#2a7y zxSI?KxU!HJFKKYH{c#ahfUx2uUjYgSh%%t?gy!R5I1wKTZ1 zYNlh5mk#nndI_EH=}_b&l22YDgAy-Zc&8BsO6{B{rQ6~`)2p~E!z+&Kxp?`cx$f%U z@%O{7|4nl{67R43|2z8nyY8=r=AU)-+|u*EW#`{ze>Hi3mf?kY{%iUEuKFur|E$^} a&cpK`+}c_Rto${{&%OG&{gl+N(Z2w_)`0i` literal 5084 zcmd6rXIK>3wuW1Rf|x->5ebr1Bn={x7CHh7f~cS{D9I)XN|c-=ieykylUqVV6B$w4v*3SPM9o^xmB{=2_s)w65WUc0^@RrNe?eQT>JQ`0bE{uP`UPPgVw z4;l<+XY9o=VICV|P4%A$KCyJ%g*o)!)tx(cR(B#oRYOHhYZt~2OrxPNkr+<)m9D)2(&IP=ribDya0-PG(eW0&F z`0h*ncpyahh%TE%!m8z!Q!1>Xpss!8GfCSYzB(PVnSJmKy3FvoYSDIRXw^$UhVTZd zPLUX3^9^#%;h$f@<)SU=B4ah0FpxGrWWF@%3qe+qCAA&VkaqC$%5Loxi1E)qqk23Y zRH>1zcX<~4q+|JhhdK)+ctT>j2Z#{*lc$GoGZLQDYOh9CKS$Qje&?g;H^HpvC>^47_&9v)>%?ON~&9Q=|eeklZ)kcr71_B<^@*C4CQEO z*9@H*t{hQ?<|zeCzD2bJrZb81+gVAHm)|W9*$JL=Na)W*4u|(tA8GFcg^}9bF{i(P z(z{oQmfyZYdq(4K!+|m2y2u;!NM{i|UE?Jl#wj$!Herj(!;toc#t_?uH#b zSLUENLWD0;aSrCTPvnDh(7fLJtRQC^F5f0yZ+tfa%iN4{;aB^?PQS%6KDGz$;t@IQ z)F((xocU_y(S(lNC=zsRs7E7*y|9yaDv^2L?$;z@0t((^LVQ}A0Ig%2msU?0pt$5S z_ILYp(b*Z_hXi?qZm+J_7d^`WTMgO3;Bo@^$DgQdZ_k5~<&&&iTp3`MVk9g5ngs3o zinHZZIPh&b8lalkg2-E{nc^|csO$36=iR(z(8!(b`;@r?bZ(v-^Se_Gsx}Rth*$+? 
zGIyj~I4VK!2X_i4vld0&G8Xcisz-ve$3xsia*=9X`OAQtai}u;k>7rYB51k5uh=kE z1iTzu>rd?{;QPdSkF%L2+`UD7PrJVWQPalJ?E%~HS5?Fc1`VK4Oz-e99KwmHlVi{Hi zf7+Eoz^A9Yw(r0)&S#7&6JEme#g8Tr9dYoYhnjS;9tXRh z$=(p*BEalNzce3W2GHtrxmC_2!PQqaVpz)}NOWJTDki6ZH|>(kQ~q=ydKi^weKy|wFMRjCd^Q3~q;4MnVH^+radjRJN8(^!p#Jl^ zVLZ@h*}50+&w+GZH6}`9JcNbKc-&-81n(%Uq zB9cK^LBfq!?IkE)QFXn}m|*>z=q|*ioJ9u-0i*= zK3Sdw@{YdC!|f?xDy$Nc(vb$pil?H(G7mb&=3#{=}8_OJ;2kfM-@ZDs@xNJQi0-YNNJ;yoCuXB+(Be~9voXR&CgG*fRwj)`wZ2R0F_Oasq76->y!8 z@yXHzEm=I^Id{{3W=;l{uV)hD3kpC@@B7@iVHV(BZ<8I}3g8!ui;vcHD&$_a5kG7e z4Z)Qhh4$`6VB4FQV8@aGo@ThAF5T^4GP9meolX?2to6&h{TTzng&am=10*0{yOz}v zf`_hAHX53Y80i1`I5k@;3LemMsOtTA1`moqsL^Bx0Mjw2n@ZLBz&6~<|J^Mg5}N(L zq)Ov~nfoxSKUEAkCI3{;|5geH4e1YzJ(GboqhwJuJ{`)@aj&qaiSSo+8Ro5t{~KL& z+vqa4xOCy4N-h&DU#ttGD-3T)F}&FDg-2pz#9NvEh#L6mEBP|eB{rK!j>1hyMNO`I zQ?3#fH|VIVZksR;p+!1FCoJ+5eO_B#;S3MnrRiK<$VVAmDwPusmyx@)*FEQ@62ubl zb_0`9hgcY~=Yw>skZ@1b!6ymjXw~!@W?`-taoV%*K5r3+Tof!82ew>Lsbjvdk$eXt zpMQX58BIg}uL9@ISnv?sJv9(a!h_$M;w7`HNF+YhsqH!+11ZsNZG~hXG&<^Unsc!L zdGI>Y9=zX$LN!|sKmM%-nXVjd;~%a@;o90dQc^W&pzAq3)*Xi=DgtxQiiZPDzph6b zYce$TL~1c*TZ3K6`Sn9?dBCw{<%0Lk0QUtQp9Ixb)W_1r$vgT6`3mAOCj24D8s~{I zFm6FIImH&DHw%$btyDX)CK^0PMA&56L!jRnzjS3x4+K&wUdcyztvp-iqi9~ z<-dKXLUT=*qh08#5M6zj+j>bS;*1WgN@jS64z^3+of=CKt!pKgA-fzoH$;m&GUT8s zqrQ^Xu5OT%=AkZR9)_^8x;Q3-FK{ydDxp_&6fRs8l4!A7fQ8dOLF)Gx;pclXN#C9Y zs7M$6C>t>kJo)rlxr(ze8Qv(RLO%&t{c>vwnbVN=5PmGEOai-24~0f-22%Bviw;mt zz!1~mEFIMhEL&>KOBH{E_BDzQr_mJDQQ|bZ&=jDkI}KJ%6OdXUFhcPgg?-D7{OxnS zkaF#gq7uU=xHjo#x*XC3f!51W=8K)E-QvwGd&ozG?Fo>~jIBW`hU*HS21$tM@DA}w z;}LiKRIzMbG4cv$(9({iAQ{Q?QxZNs=;n{H42ozN5>4Es#2@m2u+MS>CTyPIN}WmE z72biC14R_+iGIM`ZhD`2I0|J7v7eJ{>qY5@xxS3bwj#A)+|Na-WKdULK0H#Dg`8;J z*GzrNVb)z!K6a}KRavs}Bf)eK-E(^L1i1~}lVrYE8`_R&774Yvr;Ct5L<8ElgDCQ?21-1Me>Yuq zT3c_Ae?CU}(f-ZF#{6WtGH= z_6mByq2637<+NIua6DGzr}!RbsOW}@wJJd@A==#TAQ8TjaCGd~8-RnFOBT1826Td~ zl&z#!5NgXk$}!-F;^C)tadRPC1L%S-PR0ToGhksQcNrVaB zv6bq3>0lK4n%+XJ6tdrHN4JSIK~)@2LMLB2+^GLei*U3CY@3u?C%+a#y(IT3k=rkz zX2tMQ!R|L`Vm#z`9k)_wFL`(?g1ZoGo*5Xo^%WqSaGZrHCKniQ_tUSOECOLV+T;&3 zm7v_g*6bcq52_*B)1`EE5Ug<{JFBM}tO;TFq)T$)TCmP?p*RWDuTBqeeXoUUt_N-t zTvMP&y5eN^S`uKRYWalw$soDE{$lrXHedc!w$I#V=C% zB3BILjpN+Ul0L%LbN2QsLSS@g69hckL6Dc zfmGf1n>=>}Scz!LwY%6J681Lo!}e5=Z!_* za&Lpg)Vq3~#sqNbWL#?5_5?@d1&ngCTu|N-o)Q~EGgvHsczh_M9WK4#6gf6h4fcAr zdz5QP(D`lZBo%utd~bZnqi0im%#@-M->d2qP+E`YVh7L43of5UvTx$@Lrpo(Iail3lj^Dgt@-8(?0` z*1)8kYX0#tD>ShuyN;ha1(F^`?NRDaL>R@$J6LrROyd+r3dc&}w}kmfv){M9qVg^( zrfqliulW0B*Z-8algF-|>;K*Q`ainAG@5_bQMOyp|FoTdm;Ke`{aI#DkNJ<~`@4$s gFM$2CO5^|r^Kaa$Dbw!WS)
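
For quick reference, below is a minimal end-to-end sketch of the renamed priors in use, assembled from the snippets already in this patch (the README hunk for the Deconvolution2D/LMRF setup, the demo files for the Gaussian likelihood, BayesianProblem and CMRF lines). The noise covariance, the number of samples, and the plotting call at the end are illustrative choices, not values prescribed by the patch.

import numpy as np
from cuqi.testproblem import Deconvolution2D
from cuqi.data import grains
from cuqi.distribution import LMRF, CMRF, Gaussian
from cuqi.problem import BayesianProblem

# Step 1: Model and data, y = Ax (as in the README example above)
A, y_data, info = Deconvolution2D.get_components(dim=128, phantom=grains())

# Step 2: Prior, x ~ LMRF(0, 0.01) -- renamed from Laplace_diff by this patch
x = LMRF(location=np.zeros(A.domain_dim),
         scale=0.01,
         bc_type='neumann',
         physical_dim=2)

# An edge-preserving alternative, renamed from Cauchy_diff by this patch:
# x = CMRF(location=np.zeros(A.domain_dim), scale=0.01,
#          bc_type='neumann', physical_dim=2)

# Step 3: Likelihood, y ~ N(Ax, sigma^2 I); the noise level here is illustrative
y = Gaussian(mean=A @ x, cov=0.0036)

# Step 4: Bayesian problem conditioned on the observed data, then sample.
# For an LMRF prior with a Gaussian likelihood, sample_posterior selects the
# UGLA sampler automatically (see the _problem.py hunk above).
BP = BayesianProblem(y, x).set_data(y=y_data)
samples = BP.sample_posterior(200)
samples.plot_ci(exact=info.exactSolution)

A side effect of the rename is a consistent naming scheme for the Markov random field priors: GMRF, LMRF and CMRF now follow the same pattern instead of mixing GMRF with Laplace_diff and Cauchy_diff.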