From 9d9a980d68840c91617cce86012aeffc073945d2 Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Sun, 27 Sep 2020 19:28:28 +0200
Subject: [PATCH 01/10] Added a condition to account for 2D images (images with
no color channels). This way we replicate exactly what ToTensor does within
the WideDeepDataset class
---
pytorch_widedeep/models/_wd_dataset.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/pytorch_widedeep/models/_wd_dataset.py b/pytorch_widedeep/models/_wd_dataset.py
index aa5dc6e4..a3dcd2f0 100644
--- a/pytorch_widedeep/models/_wd_dataset.py
+++ b/pytorch_widedeep/models/_wd_dataset.py
@@ -68,6 +68,8 @@ def __getitem__(self, idx: int):
# then we need to replicate what Tensor() does -> transpose axis
# and normalize if necessary
if not self.transforms or "ToTensor" not in self.transforms_names:
+ if xdi.ndim == 2:
+ xdi = xdi[:, :, None]
xdi = xdi.transpose(2, 0, 1)
if "int" in str(xdi.dtype):
xdi = (xdi / xdi.max()).astype("float32")
From 5ee5fbe84288f29aa4a5739c2ea326116c43dc22 Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Sun, 27 Sep 2020 19:40:41 +0200
Subject: [PATCH 02/10] check travis inconsistency by simply removing a
commented line
---
examples/adult_script.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/examples/adult_script.py b/examples/adult_script.py
index 36e7733b..00930020 100644
--- a/examples/adult_script.py
+++ b/examples/adult_script.py
@@ -113,4 +113,3 @@
# torch.save(model.state_dict(), "model_weights/model_dict.t")
# model = WideDeep(wide=wide, deepdense=deepdense)
# model.load_state_dict(torch.load("model_weights/model_dict.t"))
- # #
From 9a6a24d3e2693cb91a5ef3c2b8a9a71236f0757e Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Fri, 27 Nov 2020 23:56:13 +0100
Subject: [PATCH 03/10] all started by trying to add a line so that the builder
accepted 2D images (or, in general, images of dim different than 3). But it
ended up adding functionality so that each individual component (wide,
deepdense, deeptext and deepimage) can be used individually
---
examples/adult_script.py | 1 +
pytorch_widedeep/models/_wd_dataset.py | 25 +-
pytorch_widedeep/models/wide_deep.py | 281 ++++++++----------
setup.py | 2 +-
tests/test_model_components/test_wide_deep.py | 2 +-
.../test_data_inputs.py | 46 +++
6 files changed, 194 insertions(+), 163 deletions(-)
diff --git a/examples/adult_script.py b/examples/adult_script.py
index 00930020..36e7733b 100644
--- a/examples/adult_script.py
+++ b/examples/adult_script.py
@@ -113,3 +113,4 @@
# torch.save(model.state_dict(), "model_weights/model_dict.t")
# model = WideDeep(wide=wide, deepdense=deepdense)
# model.load_state_dict(torch.load("model_weights/model_dict.t"))
+ # #
diff --git a/pytorch_widedeep/models/_wd_dataset.py b/pytorch_widedeep/models/_wd_dataset.py
index a3dcd2f0..447877bb 100644
--- a/pytorch_widedeep/models/_wd_dataset.py
+++ b/pytorch_widedeep/models/_wd_dataset.py
@@ -27,11 +27,11 @@ class WideDeepDataset(Dataset):
def __init__(
self,
- X_wide: np.ndarray,
- X_deep: np.ndarray,
- target: Optional[np.ndarray] = None,
+ X_wide: Optional[np.ndarray] = None,
+ X_deep: Optional[np.ndarray] = None,
X_text: Optional[np.ndarray] = None,
X_img: Optional[np.ndarray] = None,
+ target: Optional[np.ndarray] = None,
transforms: Optional[Any] = None,
):
@@ -48,10 +48,12 @@ def __init__(
self.transforms_names = []
self.Y = target
- def __getitem__(self, idx: int):
- # X_wide and X_deep are assumed to be *always* present
- X = Bunch(wide=self.X_wide[idx])
- X.deepdense = self.X_deep[idx]
+ def __getitem__(self, idx: int): # noqa: C901
+ X = Bunch()
+ if self.X_wide is not None:
+ X.wide = self.X_wide[idx]
+ if self.X_deep is not None:
+ X.deepdense = self.X_deep[idx]
if self.X_text is not None:
X.deeptext = self.X_text[idx]
if self.X_img is not None:
@@ -89,4 +91,11 @@ def __getitem__(self, idx: int):
return X
def __len__(self):
- return len(self.X_deep)
+ if self.X_wide is not None:
+ return len(self.X_wide)
+ if self.X_deep is not None:
+ return len(self.X_deep)
+ if self.X_text is not None:
+ return len(self.X_text)
+ if self.X_img is not None:
+ return len(self.X_img)
diff --git a/pytorch_widedeep/models/wide_deep.py b/pytorch_widedeep/models/wide_deep.py
index 70ce529b..84757d6f 100644
--- a/pytorch_widedeep/models/wide_deep.py
+++ b/pytorch_widedeep/models/wide_deep.py
@@ -1,5 +1,4 @@
import os
-import warnings
import numpy as np
import torch
@@ -21,6 +20,9 @@
from ._multiple_transforms import MultipleTransforms
from ._multiple_lr_scheduler import MultipleLRScheduler
+# import warnings
+
+
n_cpus = os.cpu_count()
use_cuda = torch.cuda.is_available()
@@ -104,37 +106,24 @@ class WideDeep(nn.Module):
"""
- def __init__(
+ def __init__( # noqa: C901
self,
- wide: nn.Module,
- deepdense: nn.Module,
- pred_dim: int = 1,
+ wide: Optional[nn.Module] = None,
+ deepdense: Optional[nn.Module] = None,
deeptext: Optional[nn.Module] = None,
deepimage: Optional[nn.Module] = None,
deephead: Optional[nn.Module] = None,
head_layers: Optional[List[int]] = None,
head_dropout: Optional[List] = None,
head_batchnorm: Optional[bool] = None,
+ pred_dim: int = 1,
):
super(WideDeep, self).__init__()
- # check that model components have the required output_dim attribute
- if not hasattr(deepdense, "output_dim"):
- raise AttributeError(
- "deepdense model must have an 'output_dim' attribute. "
- "See pytorch-widedeep.models.deep_dense.DeepDense"
- )
- if deeptext is not None and not hasattr(deeptext, "output_dim"):
- raise AttributeError(
- "deeptext model must have an 'output_dim' attribute. "
- "See pytorch-widedeep.models.deep_dense.DeepText"
- )
- if deepimage is not None and not hasattr(deepimage, "output_dim"):
- raise AttributeError(
- "deepimage model must have an 'output_dim' attribute. "
- "See pytorch-widedeep.models.deep_dense.DeepText"
- )
+ self._check_params(
+ deepdense, deeptext, deepimage, deephead, head_layers, head_dropout
+ )
# required as attribute just in case we pass a deephead
self.pred_dim = pred_dim
@@ -146,17 +135,11 @@ def __init__(
self.deepimage = deepimage
self.deephead = deephead
- if deephead is not None and head_layers is not None:
- warnings.simplefilter("module")
- warnings.warn(
- "both 'deephead' and 'head_layers' are not None."
- "'deephead' takes priority and will be used",
- UserWarning,
- )
-
if self.deephead is None:
if head_layers is not None:
- input_dim: int = self.deepdense.output_dim # type:ignore
+ input_dim = 0
+ if self.deepdense is not None:
+ input_dim += self.deepdense.output_dim # type:ignore
if self.deeptext is not None:
input_dim += self.deeptext.output_dim # type:ignore
if self.deepimage is not None:
@@ -179,9 +162,10 @@ def __init__(
"head_out", nn.Linear(head_layers[-1], pred_dim)
)
else:
- self.deepdense = nn.Sequential(
- self.deepdense, nn.Linear(self.deepdense.output_dim, pred_dim) # type: ignore
- )
+ if self.deepdense is not None:
+ self.deepdense = nn.Sequential(
+ self.deepdense, nn.Linear(self.deepdense.output_dim, pred_dim) # type: ignore
+ )
if self.deeptext is not None:
self.deeptext = nn.Sequential(
self.deeptext, nn.Linear(self.deeptext.output_dim, pred_dim) # type: ignore
@@ -190,34 +174,42 @@ def __init__(
self.deepimage = nn.Sequential(
self.deepimage, nn.Linear(self.deepimage.output_dim, pred_dim) # type: ignore
)
- else:
- self.deephead
+ # else:
+ # self.deephead
- def forward(self, X: Dict[str, Tensor]) -> Tensor: # type: ignore
+ def forward(self, X: Dict[str, Tensor]) -> Tensor: # type: ignore # noqa: C901
# Wide output: direct connection to the output neuron(s)
- out = self.wide(X["wide"])
+ if self.wide is not None:
+ out = self.wide(X["wide"])
+ else:
+ batch_size = X[list(X.keys())[0]].size(0)
+ out = torch.zeros(batch_size, self.pred_dim)
# Deep output: either connected directly to the output neuron(s) or
# passed through a head first
if self.deephead:
- deepside = self.deepdense(X["deepdense"])
+ if self.deepdense is not None:
+ deepside = self.deepdense(X["deepdense"])
+ else:
+ deepside = torch.FloatTensor()
if self.deeptext is not None:
deepside = torch.cat([deepside, self.deeptext(X["deeptext"])], axis=1) # type: ignore
if self.deepimage is not None:
deepside = torch.cat([deepside, self.deepimage(X["deepimage"])], axis=1) # type: ignore
deephead_out = self.deephead(deepside)
deepside_out = nn.Linear(deephead_out.size(1), self.pred_dim)(deephead_out)
- return out.add(deepside_out)
+ return out.add_(deepside_out)
else:
- out.add(self.deepdense(X["deepdense"]))
+ if self.deepdense is not None:
+ out.add_(self.deepdense(X["deepdense"]))
if self.deeptext is not None:
- out.add(self.deeptext(X["deeptext"]))
+ out.add_(self.deeptext(X["deeptext"]))
if self.deepimage is not None:
- out.add(self.deepimage(X["deepimage"]))
+ out.add_(self.deepimage(X["deepimage"]))
return out
- def compile(
+ def compile( # noqa: C901
self,
method: str,
optimizers: Optional[Union[Optimizer, Dict[str, Optimizer]]] = None,
@@ -372,14 +364,7 @@ def compile(
if optimizers is not None:
if isinstance(optimizers, Optimizer):
self.optimizer: Union[Optimizer, MultipleOptimizer] = optimizers
- elif isinstance(optimizers, Dict) and len(optimizers) == 1:
- raise ValueError(
- "The dictionary of optimizers must contain one item per model component, "
- "i.e. at least two for the 'wide' and 'deepdense' components. Otherwise "
- "pass one Optimizer object that will be used for all components"
- "i.e. optimizers = torch.optim.Adam(model.parameters())"
- )
- elif len(optimizers) > 1:
+ elif isinstance(optimizers, Dict):
opt_names = list(optimizers.keys())
mod_names = [n for n, c in self.named_children()]
for mn in mod_names:
@@ -430,7 +415,7 @@ def compile(
if use_cuda:
self.cuda()
- def fit(
+ def fit( # noqa: C901
self,
X_wide: Optional[np.ndarray] = None,
X_deep: Optional[np.ndarray] = None,
@@ -590,13 +575,6 @@ def fit(
"""
- if X_train is None and (X_wide is None or X_deep is None or target is None):
- raise ValueError(
- "Training data is missing. Either a dictionary (X_train) with "
- "the training dataset or at least 3 arrays (X_wide, X_deep, "
- "target) must be passed to the fit method"
- )
-
self.batch_size = batch_size
train_set, eval_set = self._train_val_split(
X_wide, X_deep, X_text, X_img, X_train, X_val, val_split, target
@@ -807,7 +785,7 @@ def _loss_fn(self, y_pred: Tensor, y_true: Tensor) -> Tensor: # type: ignore
if self.method == "multiclass":
return F.cross_entropy(y_pred, y_true, weight=self.class_weight)
- def _train_val_split(
+ def _train_val_split( # noqa: C901
self,
X_wide: Optional[np.ndarray] = None,
X_deep: Optional[np.ndarray] = None,
@@ -835,100 +813,51 @@ def _train_val_split(
:obj:`torch.utils.data.DataLoader`. See
:class:`pytorch_widedeep.models._wd_dataset`
"""
- # Without validation
- if X_val is None and val_split is None:
- # if a train dictionary is passed, check if text and image datasets
- # are present and instantiate the WideDeepDataset class
- if X_train is not None:
- X_wide, X_deep, target = (
- X_train["X_wide"],
- X_train["X_deep"],
- X_train["target"],
- )
- if "X_text" in X_train.keys():
- X_text = X_train["X_text"]
- if "X_img" in X_train.keys():
- X_img = X_train["X_img"]
- X_train = {"X_wide": X_wide, "X_deep": X_deep, "target": target}
- try:
- X_train.update({"X_text": X_text})
- except:
- pass
- try:
- X_train.update({"X_img": X_img})
- except:
- pass
+
+ if X_val is not None:
+ assert (
+ X_train is not None
+ ), "if the validation set is passed as a dictionary, the training set must also be a dictionary"
train_set = WideDeepDataset(**X_train, transforms=self.transforms) # type: ignore
- eval_set = None
- # With validation
- else:
- if X_val is not None:
- # if a validation dictionary is passed, then if not train
- # dictionary is passed we build it with the input arrays
- # (either the dictionary or the arrays must be passed)
- if X_train is None:
- X_train = {"X_wide": X_wide, "X_deep": X_deep, "target": target}
- if X_text is not None:
- X_train.update({"X_text": X_text})
- if X_img is not None:
- X_train.update({"X_img": X_img})
- else:
- # if a train dictionary is passed, check if text and image
- # datasets are present. The train/val split using val_split
- if X_train is not None:
- X_wide, X_deep, target = (
- X_train["X_wide"],
- X_train["X_deep"],
- X_train["target"],
- )
- if "X_text" in X_train.keys():
- X_text = X_train["X_text"]
- if "X_img" in X_train.keys():
- X_img = X_train["X_img"]
- (
- X_tr_wide,
- X_val_wide,
- X_tr_deep,
- X_val_deep,
- y_tr,
- y_val,
- ) = train_test_split(
- X_wide,
- X_deep,
- target,
- test_size=val_split,
- random_state=self.seed,
- stratify=target if self.method != "regression" else None,
+ eval_set = WideDeepDataset(**X_val, transforms=self.transforms) # type: ignore
+ elif val_split is not None:
+ if not X_train:
+ X_train = self._build_train_dict(X_wide, X_deep, X_text, X_img, target)
+ y_tr, y_val, idx_tr, idx_val = train_test_split(
+ X_train["target"],
+ np.arange(len(X_train["target"])),
+ test_size=val_split,
+ stratify=X_train["target"] if self.method != "regression" else None,
+ )
+ X_tr, X_val = {"target": y_tr}, {"target": y_val}
+ if "X_wide" in X_train.keys():
+ X_tr["X_wide"], X_val["X_wide"] = (
+ X_train["X_wide"][idx_tr],
+ X_train["X_wide"][idx_val],
)
- X_train = {"X_wide": X_tr_wide, "X_deep": X_tr_deep, "target": y_tr}
- X_val = {"X_wide": X_val_wide, "X_deep": X_val_deep, "target": y_val}
- try:
- X_tr_text, X_val_text = train_test_split(
- X_text,
- test_size=val_split,
- random_state=self.seed,
- stratify=target if self.method != "regression" else None,
- )
- X_train.update({"X_text": X_tr_text}), X_val.update(
- {"X_text": X_val_text}
- )
- except:
- pass
- try:
- X_tr_img, X_val_img = train_test_split(
- X_img,
- test_size=val_split,
- random_state=self.seed,
- stratify=target if self.method != "regression" else None,
- )
- X_train.update({"X_img": X_tr_img}), X_val.update(
- {"X_img": X_val_img}
- )
- except:
- pass
- # At this point the X_train and X_val dictionaries have been built
- train_set = WideDeepDataset(**X_train, transforms=self.transforms) # type: ignore
+ if "X_deep" in X_train.keys():
+ X_tr["X_deep"], X_val["X_deep"] = (
+ X_train["X_deep"][idx_tr],
+ X_train["X_deep"][idx_val],
+ )
+ if "X_text" in X_train.keys():
+ X_tr["X_text"], X_val["X_text"] = (
+ X_train["X_text"][idx_tr],
+ X_train["X_text"][idx_val],
+ )
+ if "X_img" in X_train.keys():
+ X_tr["X_img"], X_val["X_img"] = (
+ X_train["X_img"][idx_tr],
+ X_train["X_img"][idx_val],
+ )
+ train_set = WideDeepDataset(**X_tr, transforms=self.transforms) # type: ignore
eval_set = WideDeepDataset(**X_val, transforms=self.transforms) # type: ignore
+ else:
+ if not X_train:
+ X_train = self._build_train_dict(X_wide, X_deep, X_text, X_img, target)
+ train_set = WideDeepDataset(**X_train, transforms=self.transforms) # type: ignore
+ eval_set = None
+
return train_set, eval_set
def _warm_up(
@@ -981,7 +910,7 @@ def _warm_up(
else:
warmer.warm_all(self.deepimage, "deepimage", loader, n_epochs, max_lr)
- def _lr_scheduler_step(self, step_location: str):
+ def _lr_scheduler_step(self, step_location: str): # noqa: C901
r"""
Function to execute the learning rate schedulers steps.
If the lr_scheduler is Cyclic (i.e. CyclicLR or OneCycleLR), the step
@@ -1095,7 +1024,7 @@ def _predict(
num_workers=n_cpus,
shuffle=False,
)
- test_steps = (len(test_loader.dataset) // test_loader.batch_size) + 1
+ test_steps = (len(test_loader.dataset) // test_loader.batch_size) + 1 # type: ignore[arg-type]
self.eval()
preds_l = []
@@ -1113,3 +1042,49 @@ def _predict(
preds_l.append(preds)
self.train()
return preds_l
+
+ @staticmethod
+ def _build_train_dict(X_wide, X_deep, X_text, X_img, target):
+ X_train = {"target": target}
+ if X_wide is not None:
+ X_train["X_wide"] = X_wide
+ if X_deep is not None:
+ X_train["X_deep"] = X_deep
+ if X_text is not None:
+ X_train["X_text"] = X_text
+ if X_img is not None:
+ X_train["X_img"] = X_img
+ return X_train
+
+ @staticmethod
+ def _check_params(
+ deepdense, deeptext, deepimage, deephead, head_layers, head_dropout
+ ):
+
+ if deepdense is not None and not hasattr(deepdense, "output_dim"):
+ raise AttributeError(
+ "deepdense model must have an 'output_dim' attribute. "
+ "See pytorch-widedeep.models.deep_dense.DeepText"
+ )
+ if deeptext is not None and not hasattr(deeptext, "output_dim"):
+ raise AttributeError(
+ "deeptext model must have an 'output_dim' attribute. "
+ "See pytorch-widedeep.models.deep_dense.DeepText"
+ )
+ if deepimage is not None and not hasattr(deepimage, "output_dim"):
+ raise AttributeError(
+ "deepimage model must have an 'output_dim' attribute. "
+ "See pytorch-widedeep.models.deep_dense.DeepText"
+ )
+ if deephead is not None and head_layers is not None:
+ raise ValueError(
+ "both 'deephead' and 'head_layers' are not None. Use one of the other, but not both"
+ )
+ if head_layers is not None and not deepdense and not deeptext and not deepimage:
+ raise ValueError(
+ "if 'head_layers' is not None, at least one deep component must be used"
+ )
+ if head_layers is not None and head_dropout is not None:
+ assert len(head_layers) == len(
+ head_dropout
+ ), "'head_layers' and 'head_dropout' must have the same length"
diff --git a/setup.py b/setup.py
index 9f9d8702..33619d0e 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
]
extras["quality"] = [
"black",
- "isort @ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort",
+ "isort",
"flake8",
]
diff --git a/tests/test_model_components/test_wide_deep.py b/tests/test_model_components/test_wide_deep.py
index 5a6fc249..1e822862 100644
--- a/tests/test_model_components/test_wide_deep.py
+++ b/tests/test_model_components/test_wide_deep.py
@@ -55,7 +55,7 @@ def test_history_callback(deepcomponent, component_name):
def test_deephead_and_head_layers():
deephead = nn.Sequential(nn.Linear(32, 16), nn.Linear(16, 8))
- with pytest.warns(UserWarning):
+ with pytest.raises(ValueError):
model = WideDeep( # noqa: F841
wide=wide, deepdense=deepdense, head_layers=[16, 8], deephead=deephead
)
diff --git a/tests/test_model_functioning/test_data_inputs.py b/tests/test_model_functioning/test_data_inputs.py
index da484fff..89f7021e 100644
--- a/tests/test_model_functioning/test_data_inputs.py
+++ b/tests/test_model_functioning/test_data_inputs.py
@@ -266,3 +266,49 @@ def test_widedeep_inputs(
model.history.epoch[0] == nepoch
and model.history._history["train_loss"] is not null
)
+
+
+@pytest.mark.parametrize(
+ "X_wide, X_deep, X_text, X_img, X_train, X_val, target",
+ [
+ (
+ X_wide,
+ X_deep,
+ X_text,
+ X_img,
+ None,
+ {
+ "X_wide": X_wide_val,
+ "X_deep": X_deep_val,
+ "X_text": X_text_val,
+ "X_img": X_img_val,
+ "target": y_val,
+ },
+ target,
+ ),
+ ],
+)
+def test_xtrain_xval_assertion(
+ X_wide,
+ X_deep,
+ X_text,
+ X_img,
+ X_train,
+ X_val,
+ target,
+):
+ model = WideDeep(
+ wide=wide, deepdense=deepdense, deeptext=deeptext, deepimage=deepimage
+ )
+ model.compile(method="binary", verbose=0)
+ with pytest.raises(AssertionError):
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ X_text=X_text,
+ X_img=X_img,
+ X_train=X_train,
+ X_val=X_val,
+ target=target,
+ batch_size=16,
+ )
From 72e961b45a0e01d63cd53bda0121ef349bd985ff Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Tue, 1 Dec 2020 00:42:47 +0100
Subject: [PATCH 04/10] Added more tests in test_data_inputs
---
pytorch_widedeep/models/wide_deep.py | 29 +++--
.../test_data_inputs.py | 102 +++++++++++++++++-
tests/test_warm_up/test_warm_up_routines.py | 4 +-
3 files changed, 124 insertions(+), 11 deletions(-)
diff --git a/pytorch_widedeep/models/wide_deep.py b/pytorch_widedeep/models/wide_deep.py
index 84757d6f..873008fc 100644
--- a/pytorch_widedeep/models/wide_deep.py
+++ b/pytorch_widedeep/models/wide_deep.py
@@ -667,8 +667,8 @@ def fit( # noqa: C901
def predict(
self,
- X_wide: np.ndarray,
- X_deep: np.ndarray,
+ X_wide: Optional[np.ndarray] = None,
+ X_deep: Optional[np.ndarray] = None,
X_text: Optional[np.ndarray] = None,
X_img: Optional[np.ndarray] = None,
X_test: Optional[Dict[str, np.ndarray]] = None,
@@ -711,8 +711,8 @@ def predict(
def predict_proba(
self,
- X_wide: np.ndarray,
- X_deep: np.ndarray,
+ X_wide: Optional[np.ndarray] = None,
+ X_deep: Optional[np.ndarray] = None,
X_text: Optional[np.ndarray] = None,
X_img: Optional[np.ndarray] = None,
X_test: Optional[Dict[str, np.ndarray]] = None,
@@ -998,8 +998,8 @@ def _validation_step(self, data: Dict[str, Tensor], target: Tensor, batch_idx: i
def _predict(
self,
- X_wide: np.ndarray,
- X_deep: np.ndarray,
+ X_wide: Optional[np.ndarray] = None,
+ X_deep: Optional[np.ndarray] = None,
X_text: Optional[np.ndarray] = None,
X_img: Optional[np.ndarray] = None,
X_test: Optional[Dict[str, np.ndarray]] = None,
@@ -1056,7 +1056,7 @@ def _build_train_dict(X_wide, X_deep, X_text, X_img, target):
X_train["X_img"] = X_img
return X_train
- @staticmethod
+ @staticmethod # noqa: C901
def _check_params(
deepdense, deeptext, deepimage, deephead, head_layers, head_dropout
):
@@ -1088,3 +1088,18 @@ def _check_params(
assert len(head_layers) == len(
head_dropout
), "'head_layers' and 'head_dropout' must have the same length"
+ if deephead is not None:
+ deephead_inp_feat = next(deephead.parameters()).size(1)
+ output_dim = 0
+ if deepdense is not None:
+ output_dim += deepdense.output_dim
+ if deeptext is not None:
+ output_dim += deeptext.output_dim
+ if deepimage is not None:
+ output_dim += deepimage.output_dim
+ assert deephead_inp_feat == output_dim, (
+ "if a custom 'deephead' is used its input features ({}) must be equal to "
+ "the output features of the deep component ({})".format(
+ deephead_inp_feat, output_dim
+ )
+ )
diff --git a/tests/test_model_functioning/test_data_inputs.py b/tests/test_model_functioning/test_data_inputs.py
index 89f7021e..37f2f8b2 100644
--- a/tests/test_model_functioning/test_data_inputs.py
+++ b/tests/test_model_functioning/test_data_inputs.py
@@ -3,6 +3,7 @@
import numpy as np
import pytest
from torchvision.transforms import ToTensor, Normalize
+from torch import nn
from sklearn.model_selection import train_test_split
from pytorch_widedeep.models import (
@@ -67,11 +68,16 @@
transforms1 = [ToTensor, Normalize(mean=mean, std=std)]
transforms2 = [Normalize(mean=mean, std=std)]
+deephead_ds = nn.Sequential(nn.Linear(16, 8), nn.Linear(8, 4))
+deephead_dt = nn.Sequential(nn.Linear(64, 8), nn.Linear(8, 4))
+deephead_di = nn.Sequential(nn.Linear(512, 8), nn.Linear(8, 4))
-##############################################################################
+# #############################################################################
# Test many possible scenarios of data inputs I can think off. Surely users
# will input something unexpected
-##############################################################################
+# #############################################################################
+
+
@pytest.mark.parametrize(
"X_wide, X_deep, X_text, X_img, X_train, X_val, target, val_split, transforms, nepoch, null",
[
@@ -312,3 +318,95 @@ def test_xtrain_xval_assertion(
target=target,
batch_size=16,
)
+
+
+@pytest.mark.parametrize(
+ "wide, deepdense, deeptext, deepimage, X_wide, X_deep, X_text, X_img, target",
+ [
+ (wide, None, None, None, X_wide, None, None, None, target),
+ (None, deepdense, None, None, None, X_deep, None, None, target),
+ (None, None, deeptext, None, None, None, X_text, None, target),
+ (None, None, None, deepimage, None, None, None, X_img, target),
+ ],
+)
+def test_individual_inputs(
+ wide, deepdense, deeptext, deepimage, X_wide, X_deep, X_text, X_img, target
+):
+ model = WideDeep(
+ wide=wide, deepdense=deepdense, deeptext=deeptext, deepimage=deepimage
+ )
+ model.compile(method="binary", verbose=0)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ X_text=X_text,
+ X_img=X_img,
+ target=target,
+ batch_size=16,
+ )
+ # check it has run successfully
+ assert len(model.history._history) == 1
+
+
+###############################################################################
+# test deephead is not None and individual components
+###############################################################################
+
+
+@pytest.mark.parametrize(
+ "deepdense, deeptext, deepimage, X_deep, X_text, X_img, deephead, target",
+ [
+ (deepdense, None, None, X_deep, None, None, deephead_ds, target),
+ (None, deeptext, None, None, X_text, None, deephead_dt, target),
+ (None, None, deepimage, None, None, X_img, deephead_di, target),
+ ],
+)
+def test_deephead_individual_components(
+ deepdense, deeptext, deepimage, X_deep, X_text, X_img, deephead, target
+):
+ model = WideDeep(
+ deepdense=deepdense, deeptext=deeptext, deepimage=deepimage, deephead=deephead
+ ) # noqa: F841
+ model.compile(method="binary", verbose=0)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ X_text=X_text,
+ X_img=X_img,
+ target=target,
+ batch_size=16,
+ )
+ # check it has run successfully
+ assert len(model.history._history) == 1
+
+
+###############################################################################
+# test deephead is None and head_layers is not None and individual components
+###############################################################################
+
+
+@pytest.mark.parametrize(
+ "deepdense, deeptext, deepimage, X_deep, X_text, X_img, target",
+ [
+ (deepdense, None, None, X_deep, None, None, target),
+ (None, deeptext, None, None, X_text, None, target),
+ (None, None, deepimage, None, None, X_img, target),
+ ],
+)
+def test_head_layers_individual_components(
+ deepdense, deeptext, deepimage, X_deep, X_text, X_img, target
+):
+ model = WideDeep(
+ deepdense=deepdense, deeptext=deeptext, deepimage=deepimage, head_layers=[8, 4]
+ ) # noqa: F841
+ model.compile(method="binary", verbose=0)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ X_text=X_text,
+ X_img=X_img,
+ target=target,
+ batch_size=16,
+ )
+ # check it has run successfully
+ assert len(model.history._history) == 1
diff --git a/tests/test_warm_up/test_warm_up_routines.py b/tests/test_warm_up/test_warm_up_routines.py
index c5611d77..2fd1c951 100644
--- a/tests/test_warm_up/test_warm_up_routines.py
+++ b/tests/test_warm_up/test_warm_up_routines.py
@@ -161,7 +161,7 @@ def test_warm_all(model, modelname, loader, n_epochs, max_lr):
has_run = True
try:
warmer.warm_all(model, modelname, loader, n_epochs, max_lr)
- except:
+ except Exception:
has_run = False
assert has_run
@@ -182,6 +182,6 @@ def test_warm_gradual(model, modelname, loader, max_lr, layers, routine):
has_run = True
try:
warmer.warm_gradual(model, modelname, loader, max_lr, layers, routine)
- except:
+ except Exception:
has_run = False
assert has_run
From e75ae5a5edff61237d4e462afe5363f986b78f21 Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Thu, 3 Dec 2020 12:02:52 +0100
Subject: [PATCH 05/10] Added a few tests to increase coverage
---
pytorch_widedeep/models/wide_deep.py | 41 +++-
.../test_data_inputs.py | 2 +-
.../test_miscellaneous.py | 194 ++++++++++++++++++
3 files changed, 228 insertions(+), 9 deletions(-)
create mode 100644 tests/test_model_functioning/test_miscellaneous.py
diff --git a/pytorch_widedeep/models/wide_deep.py b/pytorch_widedeep/models/wide_deep.py
index 873008fc..b4d5d3a0 100644
--- a/pytorch_widedeep/models/wide_deep.py
+++ b/pytorch_widedeep/models/wide_deep.py
@@ -121,8 +121,15 @@ def __init__( # noqa: C901
super(WideDeep, self).__init__()
- self._check_params(
- deepdense, deeptext, deepimage, deephead, head_layers, head_dropout
+ self._check_model_components(
+ wide,
+ deepdense,
+ deeptext,
+ deepimage,
+ deephead,
+ head_layers,
+ head_dropout,
+ pred_dim,
)
# required as attribute just in case we pass a deephead
@@ -337,9 +344,9 @@ def compile( # noqa: C901
if isinstance(optimizers, Dict) and not isinstance(lr_schedulers, Dict):
raise ValueError(
- "'parameters 'optimizers' and 'lr_schedulers' must have consistent type. "
- "(Optimizer, LRScheduler) or (Dict[str, Optimizer], Dict[str, LRScheduler]) "
- "Please, read the Documentation for more details"
+ "''optimizers' and 'lr_schedulers' must have consistent type: "
+ "(Optimizer and LRScheduler) or (Dict[str, Optimizer] and Dict[str, LRScheduler]) "
+ "Please, read the documentation or see the examples for more details"
)
self.verbose = verbose
@@ -1011,7 +1018,11 @@ def _predict(
if X_test is not None:
test_set = WideDeepDataset(**X_test)
else:
- load_dict = {"X_wide": X_wide, "X_deep": X_deep}
+ load_dict = {}
+ if X_wide is not None:
+ load_dict = {"X_wide": X_wide}
+ if X_deep is not None:
+ load_dict.update({"X_deep": X_deep})
if X_text is not None:
load_dict.update({"X_text": X_text})
if X_img is not None:
@@ -1057,10 +1068,24 @@ def _build_train_dict(X_wide, X_deep, X_text, X_img, target):
return X_train
@staticmethod # noqa: C901
- def _check_params(
- deepdense, deeptext, deepimage, deephead, head_layers, head_dropout
+ def _check_model_components(
+ wide,
+ deepdense,
+ deeptext,
+ deepimage,
+ deephead,
+ head_layers,
+ head_dropout,
+ pred_dim,
):
+ if wide is not None:
+ assert wide.wide_linear.weight.size(1) == pred_dim, (
+ "the 'pred_dim' of the wide component ({}) must be equal to the 'pred_dim' "
+ "of the deep component and the overall model itself ({})".format(
+ wide.wide_linear.weight.size(1), pred_dim
+ )
+ )
if deepdense is not None and not hasattr(deepdense, "output_dim"):
raise AttributeError(
"deepdense model must have an 'output_dim' attribute. "
diff --git a/tests/test_model_functioning/test_data_inputs.py b/tests/test_model_functioning/test_data_inputs.py
index 37f2f8b2..483a8670 100644
--- a/tests/test_model_functioning/test_data_inputs.py
+++ b/tests/test_model_functioning/test_data_inputs.py
@@ -2,8 +2,8 @@
import numpy as np
import pytest
-from torchvision.transforms import ToTensor, Normalize
from torch import nn
+from torchvision.transforms import ToTensor, Normalize
from sklearn.model_selection import train_test_split
from pytorch_widedeep.models import (
diff --git a/tests/test_model_functioning/test_miscellaneous.py b/tests/test_model_functioning/test_miscellaneous.py
new file mode 100644
index 00000000..5c518fd7
--- /dev/null
+++ b/tests/test_model_functioning/test_miscellaneous.py
@@ -0,0 +1,194 @@
+import string
+
+import numpy as np
+import pytest
+import torch
+
+from sklearn.model_selection import train_test_split
+
+from pytorch_widedeep.models import (
+ Wide,
+ DeepText,
+ WideDeep,
+ DeepDense,
+ DeepImage,
+)
+from pytorch_widedeep.callbacks import EarlyStopping
+from pytorch_widedeep.metrics import Accuracy, Precision
+
+# Wide array
+X_wide = np.random.choice(50, (32, 10))
+
+# Deep Array
+colnames = list(string.ascii_lowercase)[:10]
+embed_cols = [np.random.choice(np.arange(5), 32) for _ in range(5)]
+embed_input = [(u, i, j) for u, i, j in zip(colnames[:5], [5] * 5, [16] * 5)]
+cont_cols = [np.random.rand(32) for _ in range(5)]
+X_deep = np.vstack(embed_cols + cont_cols).transpose()
+
+# Text Array
+padded_sequences = np.random.choice(np.arange(1, 100), (32, 48))
+X_text = np.hstack((np.repeat(np.array([[0, 0]]), 32, axis=0), padded_sequences))
+vocab_size = 100
+
+# Image Array
+X_img = np.random.choice(256, (32, 224, 224, 3))
+X_img_norm = X_img / 255.0
+
+# Target
+target = np.random.choice(2, 32)
+target_multi = np.random.choice(3, 32)
+
+# train/validation split
+(
+ X_wide_tr,
+ X_wide_val,
+ X_deep_tr,
+ X_deep_val,
+ X_text_tr,
+ X_text_val,
+ X_img_tr,
+ X_img_val,
+ y_train,
+ y_val,
+) = train_test_split(X_wide, X_deep, X_text, X_img, target)
+
+# build model components
+wide = Wide(np.unique(X_wide).shape[0], 1)
+deepdense = DeepDense(
+ hidden_layers=[32, 16],
+ dropout=[0.5, 0.5],
+ deep_column_idx={k: v for v, k in enumerate(colnames)},
+ embed_input=embed_input,
+ continuous_cols=colnames[-5:],
+)
+deeptext = DeepText(vocab_size=vocab_size, embed_dim=32, padding_idx=0)
+deepimage = DeepImage(pretrained=True)
+
+###############################################################################
+# test consistency between optimizers and lr_schedulers format
+###############################################################################
+
+
+def test_optimizer_scheduler_format():
+ model = WideDeep(deepdense=deepdense)
+ optimizers = {"deepdense": torch.optim.Adam(model.deepdense.parameters(), lr=0.01)}
+ schedulers = torch.optim.lr_scheduler.StepLR(optimizers["deepdense"], step_size=3)
+ with pytest.raises(ValueError):
+ model.compile(
+ method="binary",
+ optimizers=optimizers,
+ lr_schedulers=schedulers,
+ )
+
+
+###############################################################################
+# test that callbacks are properly initialised internally
+###############################################################################
+
+
+def test_non_instantiated_callbacks():
+ model = WideDeep(wide=wide, deepdense=deepdense)
+ callbacks = [EarlyStopping]
+ model.compile(method="binary", callbacks=callbacks)
+ assert model.callbacks[1].__class__.__name__ == "EarlyStopping"
+
+
+###############################################################################
+# test that multiple metrics are properly constructed internally
+###############################################################################
+
+
+def test_multiple_metrics():
+ model = WideDeep(wide=wide, deepdense=deepdense)
+ metrics = [Accuracy, Precision]
+ model.compile(method="binary", metrics=metrics)
+ assert (
+ model.metric._metrics[0].__class__.__name__ == "Accuracy"
+ and model.metric._metrics[1].__class__.__name__ == "Precision"
+ )
+
+
+###############################################################################
+# test the train step with metrics runs well for a binary prediction
+###############################################################################
+
+def test_basic_run_with_metrics_binary():
+ model = WideDeep(wide=wide, deepdense=deepdense)
+ model.compile(method="binary", metrics=[Accuracy], verbose=False)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ target=target,
+ n_epochs=1,
+ batch_size=16,
+ val_split=0.2,
+ )
+ assert (
+ "train_loss" in model.history._history.keys()
+ and "train_acc" in model.history._history.keys()
+ )
+
+
+###############################################################################
+# test the train step with metrics runs well for a muticlass prediction
+###############################################################################
+
+def test_basic_run_with_metrics_multiclass():
+ wide = Wide(np.unique(X_wide).shape[0], 3)
+ deepdense = DeepDense(
+ hidden_layers=[32, 16],
+ dropout=[0.5, 0.5],
+ deep_column_idx={k: v for v, k in enumerate(colnames)},
+ embed_input=embed_input,
+ continuous_cols=colnames[-5:],
+ )
+ model = WideDeep(wide=wide, deepdense=deepdense, pred_dim=3)
+ model.compile(method="multiclass", metrics=[Accuracy], verbose=False)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ target=target_multi,
+ n_epochs=1,
+ batch_size=16,
+ val_split=0.2,
+ )
+ assert (
+ "train_loss" in model.history._history.keys()
+ and "train_acc" in model.history._history.keys()
+ )
+
+
+###############################################################################
+# test predict method for individual components
+###############################################################################
+
+@pytest.mark.parametrize(
+ "wide, deepdense, deeptext, deepimage, X_wide, X_deep, X_text, X_img, target",
+ [
+ (wide, None, None, None, X_wide, None, None, None, target),
+ (None, deepdense, None, None, None, X_deep, None, None, target),
+ (None, None, deeptext, None, None, None, X_text, None, target),
+ (None, None, None, deepimage, None, None, None, X_img, target),
+ ],
+)
+def test_predict_with_individual_component(
+ wide, deepdense, deeptext, deepimage, X_wide, X_deep, X_text, X_img, target
+):
+
+ model = WideDeep(
+ wide=wide, deepdense=deepdense, deeptext=deeptext, deepimage=deepimage
+ )
+ model.compile(method="binary", verbose=0)
+ model.fit(
+ X_wide=X_wide,
+ X_deep=X_deep,
+ X_text=X_text,
+ X_img=X_img,
+ target=target,
+ batch_size=16,
+ )
+ # simply checking that runs and produces outputs
+ preds = model.predict(X_wide=X_wide, X_deep=X_deep, X_text=X_text, X_img=X_img)
+
+ assert preds.shape[0] == 32 and "train_loss" in model.history._history
From 3b0f9b7eb2c609082eb75e2a85a30d680118ffc6 Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Thu, 3 Dec 2020 12:15:17 +0100
Subject: [PATCH 06/10] Fix minor style conflicts
---
tests/test_model_functioning/test_miscellaneous.py | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/tests/test_model_functioning/test_miscellaneous.py b/tests/test_model_functioning/test_miscellaneous.py
index 5c518fd7..140d5a76 100644
--- a/tests/test_model_functioning/test_miscellaneous.py
+++ b/tests/test_model_functioning/test_miscellaneous.py
@@ -1,9 +1,8 @@
import string
import numpy as np
-import pytest
import torch
-
+import pytest
from sklearn.model_selection import train_test_split
from pytorch_widedeep.models import (
@@ -13,8 +12,8 @@
DeepDense,
DeepImage,
)
-from pytorch_widedeep.callbacks import EarlyStopping
from pytorch_widedeep.metrics import Accuracy, Precision
+from pytorch_widedeep.callbacks import EarlyStopping
# Wide array
X_wide = np.random.choice(50, (32, 10))
@@ -113,6 +112,7 @@ def test_multiple_metrics():
# test the train step with metrics runs well for a binary prediction
###############################################################################
+
def test_basic_run_with_metrics_binary():
model = WideDeep(wide=wide, deepdense=deepdense)
model.compile(method="binary", metrics=[Accuracy], verbose=False)
@@ -134,6 +134,7 @@ def test_basic_run_with_metrics_binary():
# test the train step with metrics runs well for a muticlass prediction
###############################################################################
+
def test_basic_run_with_metrics_multiclass():
wide = Wide(np.unique(X_wide).shape[0], 3)
deepdense = DeepDense(
@@ -163,6 +164,7 @@ def test_basic_run_with_metrics_multiclass():
# test predict method for individual components
###############################################################################
+
@pytest.mark.parametrize(
"wide, deepdense, deeptext, deepimage, X_wide, X_deep, X_text, X_img, target",
[
From 5a71fb7d03d3ec4a9459b842a2dacba60f0fa37e Mon Sep 17 00:00:00 2001
From: jrzaurin
Date: Thu, 3 Dec 2020 18:21:48 +0100
Subject: [PATCH 07/10] updated docs so they are consistent with new
functionalities. Updated logo. Updated README and fix a typo in setup.py
---
README.md | 32 +++++++++++++++------------
docs/figures/widedeep_logo.png | Bin 74544 -> 38911 bytes
docs/figures/widedeep_logo_old.png | Bin 0 -> 74544 bytes
pypi_README.md | 6 +----
pytorch_widedeep/models/wide_deep.py | 10 ---------
setup.py | 3 ++-
6 files changed, 21 insertions(+), 30 deletions(-)
create mode 100644 docs/figures/widedeep_logo_old.png
diff --git a/README.md b/README.md
index dad4f128..23ba9947 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-
+
[![Build Status](https://travis-ci.org/jrzaurin/pytorch-widedeep.svg?branch=master)](https://travis-ci.org/jrzaurin/pytorch-widedeep)
@@ -9,11 +9,7 @@
[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/jrzaurin/pytorch-widedeep/graphs/commit-activity)
[![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/jrzaurin/pytorch-widedeep/issues)
[![codecov](https://codecov.io/gh/jrzaurin/pytorch-widedeep/branch/master/graph/badge.svg)](https://codecov.io/gh/jrzaurin/pytorch-widedeep)
-
-Platform | Version Support
----------|:---------------
-OSX | [![Python 3.6 3.7](https://img.shields.io/badge/python-3.6%20%7C%203.7-blue.svg)](https://www.python.org/)
-Linux | [![Python 3.6 3.7 3.8](https://img.shields.io/badge/python-3.6%20%7C%203.7%20%7C%203.8-blue.svg)](https://www.python.org/)
+[![Python 3.6 3.7 3.8](https://img.shields.io/badge/python-3.6%20%7C%203.7%20%7C%203.8-blue.svg)](https://www.python.org/)
# pytorch-widedeep
@@ -88,15 +84,23 @@ as:
-When using `pytorch-widedeep`, the assumption is that the so called `Wide` and
-`deep dense` (this can be either `DeepDense` or `DeepDenseResnet`. See the
-documentation and examples folder for more details) components in the figures
-are **always** present, while `DeepText text` and `DeepImage` are optional.
+Note that each individual component, `wide`, `deepdense` (either `DeepDense`
+or `DeepDenseResnet`), `deeptext` and `deepimage`, can be used independently
+and in isolation. For example, one could use only `wide`, which is simply a
+linear model.
+
+On the other hand, while I recommend using the `Wide` and `DeepDense` (or
+`DeepDenseResnet`) classes in `pytorch-widedeep` to build the `wide` and
+`deepdense` component, it is very likely that users will want to use their own
+models in the case of the `deeptext` and `deepimage` components. That is
+perfectly possible as long as the custom models have an attribute called
+`output_dim` with the size of the last layer of activations, so that
+`WideDeep` can be constructed
+
`pytorch-widedeep` includes standard text (stack of LSTMs) and image
-(pre-trained ResNets or stack of CNNs) models. However, the user can use any
-custom model as long as it has an attribute called `output_dim` with the size
-of the last layer of activations, so that `WideDeep` can be constructed. See
-the examples folder or the docs for more information.
+(pre-trained ResNets or stack of CNNs) models.
+
+See the examples folder or the docs for more information.
### Installation
diff --git a/docs/figures/widedeep_logo.png b/docs/figures/widedeep_logo.png
index a444feff993b3ba8cad46abc3a23d1b881f12df4..2c703fc66450d1857f76151f2a7476160b9df1cb 100644
GIT binary patch
literal 38911
zcmeGD1zTLr(gq6Smf#RHFt|f-cN^T@-QC^YJ-EBOI|L`ef@^Shmoty-{qFBOf8oq^
z^>k0KuCDG{YjsuKRTCjED~^bOivR`&hA06PQ3L~nNCX1|cZ7oh^@x#jd4Pc->{tj3
z%S#9g1LPg;Of9TUz`%eJDQU3E=*zhN7n4OvC;%b|MIpODX9yrXFUFCIvJe`Ol877%
zy0f7zure?lPSsvTq<;Z3z8UbPijwX0Qv)UCiD7@}IMO9t=gYL$MVH@n_Di7$uQ*1Sz^II6x|kBGY!LBJF|C^Pf#(ZDF3lQpRPuF~(>Kd2Bsv~4p2DUNZm0&9WfW4rrrxmq{9WHd^A
zID=4Mjq_Q)p=IM7zOPS=BB|DKn7A&HMm_$y|3jM$`$L*ZZa;}*%)7>}&d_H6G+sg$
z^T`Rr*O6l;i8lU8RHlK2TM?B;<#;g1xW<{fhKEEf9}9s4;p1uGuuWnbb9g$(SVK(t
zGC3wrh#^Is`Wh=7va59Gna1*G;#;vUs(3w{IQG}FvTW{@UV+J+YpN)<-5*$w&oawH
zl&55z8R3LyyB)~kLya*OYI5qz3P#_P0pAsrE7%k735)n<<(Hy=BmpW83
zU(%di`O_QJ*RW}Uv^?2EzIjMQ0lU1P-yo6IkZMRuL}4j&)Z{
zET=-&L@oH8SYDsK2)tld!+hcfx3M{tvY6>n()^`H=zdfW+KjMnX*y)L>8^bvh+7=e
z+F8FcaHr>k)eGN@5aG;^dqH2y@hjy@)IH!lvWp|q`(Jc5wDT*P(-7sg#Qpx
zGsI4inQ)l#rup(7;}2?dJwj!c;~*R3gv&7@1l%I#vk{Tq}!w2gWoG;l3CVF
zFPv1^Jx@qF$inWzI_%(Ml
zd@Fp5^c>%j(GlcX^-O$6eV25+{yErSVCx$b4j#5Rwi`}v>5;UN9J;g?ceU@zlm#J6
ziIuwr97`k%F3Y*O_itm%XA4vFRC6{9zTX4p8s?(rbn`3uF1l9sTs|GE&KtJ{N8%;?
zze1bonn;?ltQO7n76^XNEWrOtIJ92itYj{m_;oicvq(3`Tg;s&K6mpA{V3%q`?puW
zvROlV`Xs_W=YD)oi*H_!wqL7n*n46gN54Top(ESo?dak-
z<-oi19G01)6}k%E`mRyZ2-SE$dF;Y&f6#H^dU3DsN$PcaeRx0ru7l)AI37zh?8C`gga=w#K(giM|lc#;3}2uuwWH9TqPCXdG_L
z91y^!uvS|7V?TEZz1=IaMLa-KL7XGHB1Ru!9akVa6~~2g$G5`mDSSJK-H7cWtwqbq
zM%BW0FPvqkZK3VF65YAo57mpxjh1fiPkvE=~zhs29vhCj$~YtY4k2j%DG
z#pQ1n=N6f>Fu^$kj@x;o;>uK>*#zHvnBRoO)PM9u&rG&WhSCbDI{tR3&)^XDrq^Qk
zQgAIDQdFco(%tINzX>F=sB^CUoH&)jdhlyT?5JW*`aQdgVv(ek`r*&OQiwQKIXxOf
zM}}EKE8B%fkKcP-6l0V})N1M>gGPIi8I{-3xyi4FWD}bS#56403oX<-!w<>X@#vIA
z)#WN`ojY5Xwc}KX>_AND?oDWKHMKHrO1e|cKP{#Q58p3DH{Gh5^*UY7T8VvU_6#$3
zu6Gzt+)wB~^k0OBkqb|h7Tv7vSL%#NYzy80tfT#i7==}f=y?pW6j+KN8vC5OXI4Xo^!AJPU1=NpEQlM%o+|Ii>_n$yCv2Us}Zg&
zF5FsfJ-eQ5$HHPK)r0cP9QL1U*WE7LYzOx#+>IB`8!_!(wKV0GF9aXDA8(T{wS>ET
z*dA>=sP_spMkynEa!t9)AC~V{!mUaVV+5Nym2!nSe5ZSNJ+CtNt7$!fX31uIKksrD
z?^o|~u`dbRt_-K6deRcpNP8W7ul&*Sy?IaTvQ@lLqp%RICYgH!0dVNA
zqy+BXhO1y*(h0q|r5BU%Jo(u`tX;q8%i=bWDupPi!Mz@h6h>PuEg#^=s=vz#jnMuf)nW<$pZ$Jh&pUONu
z84=k7fOW#y2xkkO^Jj8Hy%VAHe&|`g9(?}6WDoX{^#O&h_jQ&AO8OaO^hTPfOPI>Y
zfKh|ma9~j2xM0wr7C5Nzf#dzREe1{r2KjeC1Q=MD1sK%7V`M?~U!QnT`HSYC8Zt2y
z3>NeY4OHCoApSENA~6s0KW%VF&^0hYC1D8(P_1O-XkudPWNzoIw~uuV>VUTgYB+&`
zeIfg+fJ-QnUV`TTZK160tS%$XWn^bVZ(wX^XhQF9WB=DYU_91>@y{+Z-I^N5%@
z897?mJ6qV<0{)uUz|hXcnU|ROFGm0U{IgCIcZ>hyWb5?rwm=(X`0EM-BmFmq|IQ5}
z<@u|ZOWwlW#9Cd%!UiNW5Dz{!Rt}!O$N#^s{2#}EC{>(H9EI&{K!nbG|0nc+iU0TF
z|3Bg1EH(a*B{TcC|7H0L1lOF(`@MPmy{qflr;!ncg_M#J
z;gnQ}5VO_J>_9hoh*=B3yR<;x`MLj@VW-!J8NHu<`{{XgI;YvW&w+>g`D6q;pIO_>
zM!M66k=h{AFF~kqFjN49h(8<(II#efG^xILKvBd{8R!3WhWp2YK_@zs{5|47!@rCA
zLr65%DNul@{?ms7jzD@u@PD&``oBeiNKFk2W&gwB@39!bS>FF8Bi8={BApr#%l^Nq
zK=V?@7kT}=uz&jdXdy6w86h!TsDCZ?KV#(q5)c2oiEw`n5Gl(iJQw2sZVWUwPayI9
zf5}k58{y;u7GKzoMgBY0Ut0_yU5EVtS*>!d;
z{I6Eo++d1dzlsRhLsUQ|rkq9<-Zm;xzP&Rm4}C!1pREJFczx)%vH5`2AoIc;lswdb
zNS*Ryd{vbK94y%yU#!v=`Nf{y-Dc%mY
z4>}L`-6C{~>wPMJ_M9fbf{jD?>)Gkn)*0+kSc8!O3jQnPa{DdLPz|I@E|TwQQg=+f
zk(|^K00I$Jo-{sLS8Mk|C$8aw-v-z!;etezaOvv`p?zUxpvZ4Y%NCGv&2(>E##$t&
z5SQoRw7s66S+<{`V~R4$+4glgtTl5+X`lXd!Q^zivF_nEQR5rG1LntjY}dM#yu-HJ
za6&ISDM$-6VMz_~h3|Rz=e|Wwr8%l`c)b
zh%E~#nN!gzQF6ezlKp`gJCWstLV^O0P3A2|!{IJC2EJFi3{DwSNA3*b8UEI?6E;uh
zXZ`vp4><~|c$SP71;n$ZO6N4O7^3x&X-4)2xuY(|DH`_3oI938)_7WuS2VAbdOZg&
zS@sR^UJIJ-t-ZoFdc{22fUGyI1Rv+p21(lF^DVsa?{624dK<1^Bn!@oQOe4zMDV>3
zbagZ**YR^Hyol{jrO#c}Ed^vmIj>vj1;?<=XfaLddQXCEwzyq$Y!gcQe|REN#?M0*
z{3^a3U)$SX-FEzAW8Yp5|6*;({AnCRh`LFA8Dgv}yHde>A8{eeB|1Fdql?ZuC(NeG
zRT;5rJTkHLv)~aSe_7Rnv&+n?Zjh34)zR1TFng%+_jg1IbXYh8wb5O&OY1oWIIT(6
zf|3RZPMXsE(}UVcOan*$H13Qe0&-xKf;1=S$PP*EvdJDO*!EmqKi$l>@4iF2-@O*=
zo1~by))bqiB?CwSG)8qzFuLkT6t$$!8KyF?ayyT%Em@PilwdcYic%UKe3oBFsiAd)
z#!j_=eb=}=UlKMgCyj*-=wgop-0Y?BDC?CndoF%BJ5ASA{sIQva&g=J@m9$ZIVPFm
zP0y+uEm~Z|03Ivty4hT8L$|J8gq&ngZEf7@a*S+92axq{*8v-c1B{Or35m?;dexY1
z<9Nn&ohIwyAYAr^^BybX5;_h&_{L@CkDq{E!ISE?h76YU!~wT^%<_+8uxm1nq4SE0
zstnY>oB<0xeyYC3gsKW`X`XjANZhJUkFTdr?$5$wfPhlvKt*kzCb6c
zaB8%%xPVzkUbh=we{BV`r6V}7grs867Wh5gJj(0PcN>fE36w}QLdq)eNW@>7sAf7}
zrjSo@lCZ5OpSbZ(e&a~5Stq2x#CjLlUqcY
zuAT>)MY&$OmmzAA$z~?o6C}VaXkh^NZ4Htt`8`?%p)HkHykG(5>jq1mUS;{Q&Z8m#
z3vIpR;Ww%zmc3yS8@_2(ayC_6R15iN0Z~gPv`{xi=^m;;NO9RPxzddiXVjAFTS@LH
z=mgM9`(vHlB2uYu=c`!vPKiL`i+#E+nAYy0>nd+Y`w&4~lTnm)w@jKKF9w;uD?kJF
z0gK6uue8je1n%MZ9-qexk-7L&^BypdngP}Ua;07;){;ZhAmg|%fq9AWUAM%g;I|Me
z<>Td7<0?!eYU%msAbZnF&1UyxZT1%zb#HjtljB)nn8_yJE_C4Aap`CJ{~Ty_qi7
zt`(Qi7v3og*#JO!8`|F0p0xkAs8M=+WZ
zDscIU{Ue5r
z5Mu>%cEo;Z?sBHu8GK+L8YhqOq}Al8K$ZlxNf5L(
zt?D}mgGTP(SXZtQU_8B|LHYLF6;gqDc)k_xMakGi40k8YIa#U5vH>DY
zE=Vqoui}VA^cod~YF+s;(Z;^>Mer$0Nny5_>P;aXzqCN?k1}Un@M6CeGW*hZh%uNe
zb(!J`nbyJHO<)_l*N~+*+A=C=kGI=$`5oWXfV~j=wgFB_ogqyz(H+uxTen)H>il|1
z96TAVqK8=v8BOcIW2QGcaX3fZZbtfCZ^C(>QBT*`o>mPDpjDpQN3p5SZ}Yd(HVn0{
z7IOXKAwcJz_MfR@iezx%oJE_(G6%I_w#{9E25JU8oWdbst^$_E1(DMBmHT2xMVT}T
z1di1~;nrX)RQ(SY=2KoD6tC}R6w)`OGLRGkC&@>H3fCvCYjh44YfuSzO$q!egsM+=
z`~a&f^*2L<8pwU?K;~7t;+`OgvFA~B2Tm!%-Srxp*IYa~EovH;Npjf4J-@f;s;5E>
zrV+>b9J#xoMKFinL)r)V7qlH%R{;~(P}8pwEB0{HF?I`8Z!PBTGNRD*_qGurMSOn#
z;(8wDkE+zRY7h%Pe6S(5agQ~1R3S;2yUo14aew6Lq5#lfz^$x}&rya&%7ys(`)k#P
zTJu*HPLLt;?qpnx@gVi9+r{j$PNdg4Tq2K{5edGqGs+?>K^6LQ_UBF8w-{@gDG`AD
z^Gh5shgxi7HU5ZZ>#1xzD~#BFt~LMi%!rbsPOw+~yH%~P(e`q=UiZEHUmP>bFU^0W
z1sIw2Gk#k#^;TU`NP--jW%x;A_C{NL#-!D=Iw;rY*2NLwIORGd$-Uq0gP*>dYO%r{
z2dhcsOke{0YZkEI(?Y0G8_sANbKiqJ;t!zP?zFV($9s`?w|-f$nvv!4`W&{%9CX4O
z1u^D1v!oo-9fk!FCKIsuXGeB!Zr1-hK5!OYf)26Py`_X9Gju>sF3RjX{1*v5q580&
zvo#3h-~Oz#*j$lf_;|#&;@Bg^irk(jt(|>a1l!I9-V?(9EPeTXLI9n3S5QBOaFj3b
z>ygv`@>89}$DZQ~=xKSa)my?JOK?0&(?Oa_Nd8*x4PjNGLD1`3MvdY$+)EWJR>KXJ
zmYMN=He}_RxUXfq%X@S0vSF)2x0X$g#QnOUa#52zpEFStkFjA91>EA?_%hhT-mk7W
zzeHIN89WJ6!Xj=rTH#L4v_?G6a=6J-i*=ryhWg{cjkHJqtJ5dQQ0YLs2XJ!cJfvfE
z6boJ3UrU^qzKWer!4?ZG&kqb1%3=OLm3&aX-9o2^t!R7=Q7szY6;Qn9^?{lop*mD5
z_V=KT+qFp+R~+(UdANfdUFlX8nUiiA`|~c2+99we3QNvyJpA=~lRrQzbI2V5yZMPD
z%Lvyhm2EF!{`6PEwit?iL^mc8bLt=!@k;pb`CSBX&`X=u+_*f=c2F~la-h`OI0btf
zC6tBQ{v&Z78eRT%iefR@;%;DL3X)NLx_`P~u#$c$dOhtog9zX1@05r9Of9eAsplyOu3_EmJET4O#SzQbx$dK?o?rl%;x77y=3jroGBtg-&
z+=1_>ONXXVbbiBmydbS3B7%4e$00s+d<`?#CBgfxnSKh1lxF8$Y=5%9S>%fM?wAiM
zhqwA90!y#p+EOl^6M~H2kEB+9diGRjo#AJav-7Sse0gmg$o6p=<*o1Sv0NBG!_P+b
zI_1giJuAqNCBsbdekXEdpH@)}k!bK4VF+FwJo|i~FE9%rwv>kjhh^Xop4Nd}ByWmu
zMdJC!9iJS$XR8yL7`It2Ra0Sd&j${t=@u-L=%S{=K*by@;9?PB>uW4y((dq;WuDq_
zfyTc3j2=IV?C@k3^c=Q-PB@WO
zR!W~y>v?&>LeZ}vmHueOA#1!WM2@x`DO8a0z_A>kP*75cJ#J9+_tB;A)YkQxy8B40
zcU60yPfjb)qzGD2Nd~7~Dit<`xW9)p)%xbeJN-NSmqUubXh0EZadNTKmYd5Oa&VR!;!i!a8CPMp_RSlVfiHH9^U=qgD3otUc%(mZ`vq_A
zsD;gHo5v||LMV_6wl064d{r$Us{Bdm?&_gRX0xK8q@)oM`RFI_K=m~x9j9#EFjpgO
ziSs3SykXW^u)>uGZB+4om)m~G`O*29EMCD|rk*1{d6L1~1q(q}>7zMjF2=3T@7_~W
zfAE2TsZ1w%F$xBewJ;z%3eT|EJ5g5F^tOav5^a>T0euuY$H>p!5GgA(C~k9s
z*orej*WtiM(?9X2T;+M9*3HNkvq8Q>ZIcSStaFI<8nuR4(0DSGauIpH(6h9Jh$z#G
zv{X|Ci;X0r=vKJJ<YhUGN3*ap}~d*=4mSz(5(qA^#%
z@c@MO=^{*74O@4kn5Rh967=Hg#Y}i}i;?N^>lNkuz0x_7R3%?wOXb6QSx?XL=(Rc$^K$K^Lt3f@NUGr
z^T%l4m?@o!FF?s%gxFJo?MyGsutV>g>{BUY=!vu4YLGLjj}uVv^Cywb5$QxQ+{UV3
zANJ!N4tZM&n<>G|wqw1Vl`J0iCDD&QEgs4PumRgLJOdaxjaB1*^BjW_(V_;(74(wA@e
zA<24=pCZtQ+|MvxUxnB>d(p*lkIPbZov^xH;Go4@8`)71X!{I4S#^2F_=_LB-*RbX
z6IJVd!aIo!j-q_umP!VGs9UmItu$Bpj?~sBD0+p^twFVRX6WcykdQUNCZe^DM=N8T
zuKLf~@uwmd3fp6N!|)?xwX_8wouL5n*FWt=e3*~uGt;Kb3Jnn?I6}=@u`yp#&5~W5
z{TW?rkw#rEL5SJ#g=k5hRM(u!ei+x(P=b`8^7Rgt4xK}6&4|ohsT9a7DF7X1J~2BC
z(}jljMv#3Yi@7BB&a){#`3srW
z?%Tr3K^AqlOYw0RYDFuhvNL@%Y00M;Q0ObTXPF@oHj)ND-MUj02=<2M@
z-aehytjS>jb%_LX8_>#tGGYVvt50#H{h#zY)=SjR$2=i50JKi4lDpkLLlE%S&M7%f
z!vseB^pr9&JOUDtDyZ?UD`UJJQV+i+gKebuf@$dbqP5%crrB{@T}rb$JVpxy5@B8N
zex-lB!)vcJjl%G}{S^J+BiUltpZ9}y{t+TD`{+yA`3Q@(-kdV)9Ja!8m<2a4Bd{Vv
zVYSGG;p5VC4uebQ0)EG*~&?S=+I;Y1}Hx6P=Gx%jl_x
zhI^_*mL$e{7v&!j@hi=UbkM+w<*@<`Q`rt@fA3inUfy<8uxjjI3DAJFw$+^m9>|H@
zJ)>%yp}iAdYn02QW>>No-eZ-Ml*V(~r9FG2XHZQlO7VW-RNRk3?Fyk=f_iw-`fkk6
zN)FK?EwG~aFq>*Z`TEFP*^hF$Ci2L4Q?>A@ltzL9{4E+XPD?CL@W6mJa@Pb5Rpkz2
z*g$CY%CK7So?6Xz+9ES@e3ht!ile-t$T1sC#13-rRr^3%Crw}JiWlr)tDNfiPT0dB>;`Aqd
zG2e*mz^|i?H%3!Bh&I4$^;1Z*wl!(#pu=I`YQ~U?F#Yq~Y%&)5$ufo7>v(GqHn-I4
z$oJQ74o6-U6Xb01A~xeJjSRaRBwe|tErf~RTV#=h;iM=+dKX5HO-}NvDABHojb6xF
zE7B)}s2qcK6=bg4w}!iuuGZN4ly$4TI~KuwX4QU>ab9K1L5D=@gk&pzzkN0k
zVA1qPrbS9N+1^#7Q_#fkJ+Nr(SeE85|1ke%v9X$c8T~qkuKi~vyj~l$?E|mznJ`_J
zpNAcR*#1Oia2*W@58T2)qYtKY#1jlCS`(4TVT6+Oea1bSC@HJ}EPme?YDpmQ6tUrj
zf$aq&I%8g>jDnO%P?h){io#<7-?ider6Y1c4Z^2;u$H~p%kqx+F`Ey_N~G~Z6E}Na
zY7+<3_2Le;_}zJ6r&_9+e3n@-d_6O`p+@{sSXN%>-MEvPK(Vyq(L`-wsG_W-@=xS>
z#H{`DN(Lvq8LcKt7pkAEaydga@PWtIEq4~xUbx1w;F5RwoRX3WHtM#=YxR`x?>C`%
zW7>F3KF~abKH3zlRwI_XIXt9|Fmprg8;%sjzfT#f(VJVo3#|Fy3l(KudpAJ}mGqj|
z^A{lG2`J7d)FXCQI5sA9TzVwGy;YiiMM`hM&!Jnj`t9p82of||K!lNJAvCuaAi<5|VY!
zE@!1qe(6Om+#+VVgtgT4H1YSqzJ$5$x$;(&7D@vs=tN0*vh)&^GSDf$vLiwH^kk*f
z0Y9gzIf8mOYh}HNM`&Q(FodE3G4?O8MprNV-lIs+N^%<49#;C=AYqsdCF%DJGu9|J
zuTiR;4>I~}Io=ud?P`@2e6JWtV=oGmxeE6}r#9gpOyjWEb8t1q1jf13>TPpOgp7~B
zHoxboT+~Yi6iKrqMAV1-Hx+Q(jA7ivoH9r4ULK!<@RUGZlt0`PiZBgOiNrn;i!efC
z`BroC?^)tZ=5LFm9T?xb2~6ZfCz9RTz!81dVf+0?ZxEaEs5#+{Z8ami9`urwiZmKP
z{tK$pEl*MD>~Bv{?6I1plDD_$;;AVFUE#G*J>4jp*upF<+yvx6jqr-GVznpG1{AS?
zvl$qxZNJ`Lup{`s&HD(E<3nL0O6IU(6!-en7Krtg3L`_sX1c^o?@B-_JTEdVCYSqc
zJIFpHBXrRjt_J}<`&vcj$G{wn)~_ieTU#A%9b9(7U6>v%=ier2YbF@L
zahc@ZF$uuIA-(a%;%^=k3#;|R>RL-zcD`i@35&b=VO=_vToSWGm|Nc}=0I}#Zy@K5
z4q)#QtI55PH0#CpiY;Wi&`qX0+tx!F^)d%hYkW9fqCABE9KVNflPZWEQ5NNYE-q|$
z->syetJYj$(v$r{qCaG;;&Lr77C?%8Tr_76VO_bFtWn6CW~4ZQhcgQDO@gv`JibYO
zypA1SsTSrdxqr^|yX?TNuvQhHp)iYY(S94b27ePDY`wVTYS+{3MR_XStZI~<8u+zr
zwS6UT#j3KBTr<1rD{$RJB=m^SWkkk9U;qk5y@CSvdZyOWc5VC!!+Z<)aUF*
zB!Vfeg>v?b>cY{ro`PvE)=xHjR4EzKRwrl{wyuTU7lVv|vK9|j57~es6{N}qP8Iii
zoubYQmBdx`PDyo;ACX>@1_?N=(j`~2icRFJ)CWKBb`Bl)u17GwH
z=6J)>&$2$NBw=Az^`kZ8cbt=z2IScI8lC^v3m8FBntLNz^#Mib>vEuEK*1
z0NiEG)hUqWb^o}Wc&F0748)(4YQtYdj0pe*jNsDS8}i8q7RGRak{iSP4;a)8#Rz+4
zt6v{Tyq}QrtsA}=Axw8o-E-trc5wfQrmCeVe*Pw8Rud@xDfL?>k_Nmo>1$LlBdnSR1FEHKVJo~HH1Afr?$6ewAH@9Q(`c5Jj2@!K)i>206qxX9LT{
zE++6=!4?mdy@M1I22)2EV|+SPX99x{g{{Jn{FE
zT!p+lMkb4@ZrQiwnqb*nm{EO#!wFF)r2v4G0S*~11GVEic-z{1Q9lyMXpBi-Cc(2V
zlq_bEbNryq~;MK$K>`oC}ofj2ODJ4LP1QY`0DpjZIDR8HW%6CwjW*3YF4IqXsPd
z1HHV~E{{v&eVbXGl`(f4hp}yz0`WBbXjqFHHn_CL7}XiT(rKt0nNj)cx+fK<&=2_S
zd0stv&2-HW?+XMJgM!4!8%q=RK#ny!=21jO-u4#WXpUqLA~(fV>FyW-P?98>$qEBx
zgh`h5JAVEwTF7fSwT^)hkMpC5Ava(^j_+GxsfkWjA#S7E@JI|=;BIC(b)CrK;N5z04MRsPP}Hq_5(vSXk`jJ0moYK!4NK@ZgwJAgG5;9wRX#o2
zpvY~U)d?X^@D{25U5@gg+A%Ri;*UY$s-7aCe63q1ae)Ld{_0`GgM`nE&LC5tN}8u8Qd>{v6c;gf93z_o4-o4Jn1(FH%T2X(G91qf~rt{c{p7W&>|P8`|8
zWv573jLW`qLx2w%l!9i5?x=K7uS4DQlH6H4janbR@{S9-gKVHX*c_3>M+TSt@dAsM
zdr=x}zHHL5W{*eAPLQlqqYVMtCk@)4_-~qB-DWZxWkguUR!M_55%GEtg4VJtH{?W9
z|EA8&ikdHF$HLHdwR9Xt-2kYCYZJS`MN=2mVuKi+2>(ZKbe4{
z1?2iE4)yCdPzDmW82bCOD~`Yac2aPYO1nT}JIyT3FwO1Pu3}Et(xsj@SGCB)IVcEL
zlj05vAy&*i^4r8(i~0RR``Gp|(ts2ki*Q0i%}a|!2y|TT5HS+Z19)^#`MDLS>vuQ&
zbOyRgouM-x&lh+OQ4y!PpHhqpvWDE-q-C84k;i^(zSqM9dH
zjqmks$7#{fql*G#4Xx3hHHPxRqxnDf!S=x#bEl{E8v#Hc$=0Vh|
zgFk<01s!Y+Iv9u`Gw}V(7;_lSNdV4l5Cy&ADiW`anA~wE9nyR7p6hykjw7q
zc?Xru!*isj@ad{C?e~&A_(oHQry{EarVhMjp(aqw!)fePk>6K
zKo&d7<;Pjp+%VlZ>!=&5x|u@D=r%)l)b$yz>Tz_N6$;d7_emAb0V-iYgvtimwSqgg
z6?5W%INFYm>O(?_NUN!*josIrhjTi^xCYvcRe%@;05cwGL776$^yF*fS$ONgWpv{0
zM~f9Z#|1*#r>nU7G-e+70fVIjrBNQMOkaGx5sqj|Zoa*6Wykh!n>W4qEv6_54niUP
z><+tKXvDQGQncC4=(cY!E|**xQWOp-QvW0PTV2-QOp;rv8acZemjJnywK=ok3=^5O
z(X~qKT;fJooO-`Ko7XTd3!O`;%dkfa(DpcS1Js$cyK{PsTHSUvV{YUr1Ih{vjMkXm
zsT3&P)_n9+k!r)wsM0Vyts?ApPkq4VWjE3o#?{9lPJXYnLJR^iW{oC(vPT4OIV3l5
zXICyF(^jJ03He8Hh)~X9Ad`OLe+E0EF@xJ{GM0WUsJ8EHZMS!gu$KQF%X%sfURqQ(
zOHUle7aWup=MipM^crL%ucNH5>zD4eYgPEZ=nPkjY&^;c-YscI5xn*yy?K^&9I{;
zOyH_Dm&oP4&C_y$?h7k)m?j2gymA!FkwT`x&Y>=)+^e`vGx^EQ(J#n=KSSRc=y7T#
z0qH%4FbMqbea7mNu61q?^HWK|-Q9_nRbp%xR0xi*5H!7)SZEZtsHV~ddVKJubwGnt
z)aCfBx78)9(zK^c%BfmD?=SZ`y013d#^BJ2S$#>G!~cnZQq_zoLdU-Mn>VHH&0!iF
zVXA=AXsmp;S#moPG@G*;3!>RMkg0_0D6HO=0{D9;Qld@SKPZn
zXayk4u4@Xr6~nGj5}et{w|wB)rvK9@Xpw
zRWiVsSHEnD8V=;A!0e4-g**)Z8zu_Q1|`gP@I!TL7L;@;!*;4V
z{;!Zi^m6tGw(xq8l<~V0xlJ_E0vS^LTky(zCRA0Uj#mawi-T|dsi~hDva@#>walJb%srAz{c%Y?_ycgp=osFwt?)D6sL$N@$-ikfu#bOhT~~jcBO&wAPOw@NKot`nUYWmU+l#zS12rl~aY&c>@SD-tR&RQW
zi%bz;*|dnr7btGacSIAragPfMjon2ZWq{JX4&(kwRBC>50EP4S?Z7Kx<#p8kquy4)
z78VR&sDZj#Dv)DRCs_3(!-jOW^-ivWr}c6A2b7!mJoOhmb5&A$T+ku8e&(7K>6+Z`
zPYrL)a*d@f*Ay`a>C-?$xqp*Jd%xG<1W4hf0X~8j+Z%tK*?H~a7r~#m_<~To6E}aG
z$6Ov0ILVJkL3gvgrK&+i3Kc}so4@l|zGzrBp1zSRo(4_OxsB4R-9RkY)<^lQ=5>_$
zN73Za5r~gYVn!bBr|au%68#ikZESaY&3QgvC7E^EDI=hi>*!l2T~WO2CCU@H=Ktbs
z+70FSFU-w-xM@ZC3KP3$Sy~Z5dhz6Xmw<%Nhsme&{9DbYaSs#$4lRm+Y|B2W8_@HPIJqpt1qRYp_AZ5OqSrhFz4!)l;HL5@gr2<}_143xcu(%4R61i-
zzl|8G$bQj^+0gwytT&^j8=_Q*r)*RSEjp~1W0p=pj$mNr%1a*J``Qe(3IzRy&`Zs2{g!8Gf*)F1Pt9lX7$$Kt^52x6yXC>+tn0yr7+#kR
zkf;7&v7UmE#sw3aKSN;Rq(&Zl9&$b8hjBl|an(fp=qbBgYb`Qu&l^AacUnNpc=U{;LyT)LML-9HfU_fkrkgm-{vi}}cYP>u4uUNyX2&NzAJ$FyHVCn=#BtrX
zdRu8yH{pHrkoyP&KjLtm;;$zc?z<@9;1`lH;m*s-680d-pbK3zlEax(PGGBmP5jq`
z$8`9CmjAbtMdr025f&?O#IQ-^0n>}PElkc};Dt+*Sx|a-;{?%gC7P|)Q_>BM>ekFJX~^o-_!oRTcMUfL{R5>S8frP8B>-ft3XNweNLJRX^Icp}
zEpjIps@IFs~yBN&?tL)bPtj83_9FUm$~3D*Zf`NYu>%Xu*13V$$tHrF%Q
zgddrFX(m-b`X3AgVHTbaTs+gZ>CT;0T3wY&6UHX$^>(+&7t_tR@6Q*Su`J>(J2V0xA3Sy5?m;T3yRew8~EvRM0V?6D?=S
zb-F;9zAviBMfdpX=b&f2GZ0V^Km@Z&HRBJ;Pj^4(Evn5a4(9(d7Oj5Iq@B58mhjo?
zQ2X`F44BOyn@N-J^t->%gGS}{=DxWlsY5R%?B_2Y2`U-&%@KK>?`n9+X->5**`UZ0
z5!QMEb@5-}8KTZ)K>SC4rga$IRWIbg7wf>6H@aj2eR=XUfrh{JZ7!R6)(YY)Y;2fb
z?ZLb%O%`8`+>ttc)zaXB{>64V0@j9{-d%RlTUJ2i<0l|C&5zUs!9V*0ox<#I1`S76
zpaa13(PsMT`ja2AL*OG`D%9t6sLgY4Hk55Npro7OYabFQSpns^GYA(8i9cCHK@
zoTnH_*~13YS7-RM44)YO6jxB{=MNGQ&RYPctL6II1{gm#98ta|7jvT?wq*tGHObhi
zsHwz{s51ROM15sIRL|SCgwiDl(p{30(hW;DNK1Eji*&Q3ba!`y(v8H@ozmU)9`*Ns
z-Y@%cXJ^itxv#osr57j9GzLcuSAV6!Fp)kCppqVo-hYHKD%1J;4ef3hIo6l+hfMUvLHL!B&4k&t!gT7=0cSHdxphYUEwtvM)TvSYyKma_1~$$DZK8oqiF
z8t<+wbX!$yg9C8O3c27tgevBtPsr#?@XZB!GL)
zU~tFSv|=BfZNi~0@0T}<(mcweeSWkHN)}LaL&X?&tvM<41fI7R5Ohjmv3@V*^C0J3
zh_CdUz#B8CfylZj6f&b~q9gp6Mcs7Q?+Ec4t6m;hX-rN%sek9W^g<(yV^ySM#Ja1&
zGedH-C3O6}z{O2|J?9R|jB<2cf(MWi8}wWD>JQL=iG0psV!OjDq!eaiKtLEE9iqV1
zh8a9m|IG4x_WHX~)kfsJZvP|!yQe%;uWD#P*6AS+NRKr*RRe5!{c}5!r|CD2c}ZY<
zDx5){`Xef;_UBWT6t*)Bz~rQp$8>7Zv2Df2D0Ojc6cGi+#j;l!pb|rvo?a74*X9%r+;VY;9KEmG=Z}VZZOIj6sKDz{ZAvCv4pml%Y$QFJAN*RjH=NVxgT
zPz+WHzyGz3R4|7g^g5#ai3MVCW-VW-*)yJ<@VxbTl5l7c_P#nB=L;N?F5agz!4sP!
zVzypKqPf2(XM8@OL-{8W9^hzQIS-=gks6QDkb0>&x>Ax7aQp=7uIyJa+43O4`*U2H
z$1Rxbr+=Ap)FK=5Ss=K}q3@;+B;1OQ-9!+VPfWu36HhfRCt`G@W>o(Iyn9Ewid(Et
z4sX1;uUZ@ix0Uk}+>dYS1?ReCwLhIIrFh;N8xbz`Nk92|937%Nt|1e6dA-o()|LYO
zm~CO-%AxjoG1R|!d@y*U4O-y#53n=>B?@q|l#p
z+7Py1p8a}uA80EF;87YnX#Ge<0Z>mSdNl*eB4fZs=;lT^(VY7xYuDKsSmNSsNXyt-r64J-%<_L?gf0R&@wx*VN5bUkNl+2$?uau=)&s
z9~>~ToS>p#XT^iV$)TRK4di@TTB6nP&(ky&QPFIWVR&;hm5HvW^?vZ52}R`wXd-L$-K
zCU|srvzmz#4%vNyLrO6@T~Qv!IhQ$i=7pl%ku=b&Yaj7WHI!2&q#SAgoaSu#w|f(
zx=294myTQ;y|_hm$$QZ*RdekxxgJ3e=rd+zQ5zTg+1wadHTXY9%^-!V2nL4*kjm3i
zu23{XiBNYpYWc?9&uJP0_1U^-aFq^L)ZNjcH=~80FQs1e_v4kbDyBJZwXt)A#rda7kL-Q$ByMaa61`P|a-bVJp7(`D8t)ix
z*hzcRgX(A1T2dTP0eXT`q#_oyko?Nh4`MtGI)5FXf}|Kl0lFsj&-qm@f^x8SKqFH-
zemg|f9689^Ir-EzNm6^Q5*EU
zf*9H8M76m_6<2ICGyd?uuDrmN29@fd~}9@^tIk7V`*Ae^(6GZpUG4Yd2+Xj`i-26SF{h
z<;Q4gcn6&?;O)PqvEhr;Ujz5$nkb_LC7jvMNX+8xqHAldMHGH1&6hc>TbFmg%dKC%
zLKygS7r;Am#hPjk{KY8Og6C5)t#ucs;bF=(#ee9NGQikI_QX_v$OT?Md`TS3_kval
zCn8C!Oyj%@+a~D28!Z(wo5-z5VlJI4+c86>0CofC2YVK{fMa$im@)&DKj&FAAYg)|
zi26~?qJg_)AZeW9VAJq^3>L@0kFxBuRHiR#M#Rg5y*4}&rVUA#8gs2J_7k=s^S`0g
z2F_`j1P>l+)8&^RDkRB&{afAA@qh>Ce=1SmGAb8Jj1pt-l{E@&G)1%s(j5kCW4Y+c
zq5&+f=^&J6)FTBzfR`|XODA7bdAo+K#by^pzR+K_|`82wF4sohdZOP@C?`Nj8hb^#MS
ztSUsc^2b#}tIS@G$70Snzvx1^vdXMhlJH+$j55<4-g%x}LSsG?0Ga0`T%sEEN{q7jh0t*VPcue
z7W7jKM-HoBY)M=~&)#(5t#VJBFzj-@j&+!%(9k50`41VI)&AF_QZi4Y0wBgw#}!@g
zX|XZa&zYk^L+(^xZ~hSqR-+vBtN-Ys@Z8%DBYYsPqX4h9#Vj1pH4z0xYF2ln!=+t=
z0Qd
zE~*ykY80P&>Bss~`XiWN`9S%kj+y}_%@EK{u5V@bDgGxl^CSp({J6adDjMnI>_F*u
z9DGKD3SnotAkyntHRh15Z}YG$Gh-4zgKd+S^&+GG(G+m^BYLPk$@e+
zqX$Z{PTu^*2*Ys9&9Ob>HP0<9$<7bB5}Q?{&|C;*Z^%Kn8n&b6vhJdb`9bM-a{Afq
zcf7bvpRTZ3I7TXuh#D`7P=eQ%Zq;n=>kGa^OO{8zpQob+^jNJJN3WzbGUKFeND2A3
zLPoHTjW={45(TA**uw5D;-_d61`KZBrvzzPUn=C2ps;D+cDf>O|KW9`bCpEf3s?R-
zj;kZIf3eFvy|QLY#fGeSY@c2tW1tn`h`m21=_eNGoJ`(Bj3mbK@Evfa0>wAltA(|R
zHU=QTTG6O`j7SPI7{Pv_V0Z27@L&Pv{fis9bCapfueh|5!9lmAga@T+ZMH8ADB{Itq+#wIeF=>(sCwBblc&h46H3l2g3Dd;XC4i{}
z4#;9|W)zKV02!R;=0TIhD%;~{MU0=(%3?pb*;+%#aG;*vWAOlhqw5y_`9nx@wd!F9
zckN0MD^~)a0t2-6gG!#ciPw$2_amt-t)2URn@yviS;>oHwr&
z>s2vnJ~_wA4r`FqMccUYe?7&~QyLWqX8DhNyOnzQXX?GG?B^-v_#{QM(Y!H2OMy7i
zx}ws>AZ9!hpR{g3+<=7su;u-m1)jjrFb7|^Ovjx>uZQBkQ3`>Y~E{zGvpk3Y=uKxycM-!rW0OnuW
zr_Z#$mf%^PqK{PH$(2hqnNsBFNnn(vCPR)(!j;tXCGn5xa}6}fJ&W{!B#okWbF8@6
z{v{k&32>}h&|b%K@I7*zc>VK;K4o(?{d31vM#c4E2Gjf(!QO7I79Vf-i8MC(s+t4!08S{#n_|>G9}6t$G&hBb?Cey`O-8ej~&s%
zrD`MUmpAl7GF<^yG=)`<_zs&jP4?)r|
zm&Q{Hw|nM{EkJNqXltii+i5tvuAnA%JI*Q+h}dxTXi)j*mxLzlNP4(wQU!~gOzL%r
zoJZ&ru<2E@IQ4yS-{4in8Me0$ed1
z(HV(ALAe|c*%0gQh6uIOR04lWrHmuOQ2S5I=)kr?lI^T12D41WYwR6pkk%=0SjXMm
z(s9i-Qfe#FF(WV}POG9&a~YBW_KlDDWV{g7wNhiV
zV{R&X{dSRs6fiC&^nc-C$d~uI15Xt|%Xtgrdx3C0?cYYyUzw@0ntCK;e#73IuWDS%
zxBIFZ49ef^m?P;4PRtTRTmpxi)mY)}{PGKdSBH`pp-u
z5PV-~8Ra*X?`66+_utQft`!@AO#HJmNs>OL#sWsIED9%Asf1u&CyYmdPJ5o<`3JcOvzah{(?_e^%T25ICKTY6i{+*~=j0Jz?jSLBNuL)z9{+`aK
zb$gXvfbEme{}kS1&o}Czek}K&!q(io5Wou{c!TeQgC6i9DZ4EV%@%O6FTjOiFE7ZM
zy0ZVf-{~kFE*`6QA!v1!YqxC;VmZe3DvazK&PNgwTv!UJM0zeHc<8rc6o_vW6ck}U
z7OtjM+Kdgq;n?l;03TG1jEr46xvJiHIii}He(+i^*1b+YMr4#dd@ZEstg#0oUeRg#
z-6bI-qA*L;)oVtR}pFR?XSQXE60x;v&S=5=sno6$RfwzLW^z>
zn@j|)?6)uyzV?*ES(|?lO^xMCwYQjk!&u$q@j&K&V&Bx{X?IpxH`Q_}0OnVUXr0jX
zJ#dOQzF)H`8Xk9=j_7`RvayQ(Jzq-Duvzv71mZpzP+oZpr2{|MkVN7$$~gK)#*I|N
z;R}zPwO%eRx)vej(I>>4Uv^rlj@C(h*6&MvZo{OJJ7$-IzBgMo->UT%J0-RGnzCWx
z6d9~A<}xyZR-BP-{C>JPZ%#Blt3XY@SsL!Xa_1L;;|+}=f*GSLgkFS?iK&aktY@Ku
z;)9nw31^g88X`k}erajSt^@~U)*+PE
z>J%1MU^*$>wmxl#DKoSNEvm?Pg|XG68Fg5|3&&kLvxbhd9~9`_azD9%m`#8lU?HH&5igRW?VTtM$Ykc
z!8;nRyyY}PY01_m&z#X=>h+C1^J0XAM&YjkMs#0`7Jk86Jm2CbZ}xU@Z8-L#gy^&>
z*4m@%f}|*Fs8B_(Y*#Jc^kPeB()nl^u*?>iJ!oPPyZzKo*f+L;A_c8HRjgyPPj4HM
zdjIxS*)AFs(-
zsIPIjqaz(-jZzd_}
zcp{i*mH1vd*+y?N^Wk=7TGz2t+yNv!3Cfjho|Dene_#Uh=^T24pH6;96cu96r=}cU
zx1h&f1>S-1=QLbmEu4|@yRc(>eagF~4qG7lzy;1S~fDhk(!kI!_&
zcc&{rN|y&k|AZ&ZTGmlsh98{d)D>SBKf|`6uyKbWny~NEQMnQeH0AiZ;}q{P-4l4)
zdBMNlM1!la9+XeRN8cB=S41nlwptMvyGLtKdfnMcVyI0lX2oSe;jp9)yTs6E;VI#`
zKA#ytA%DM>%IPX`&;p`UOyqA<>KORyZ~{|%zCY`4akZu(
z@2tv!Ko*jzUZLVymuo~=HoA0Lq?ngzqMQo--gf@
zhzYkbeePC!N4#BIGRL~E2bw#rK_{3-&RKUtPOv*ks)}MvQifF8qsN~+U!Znm#U
zlshszZ6_kwaPqowors85l-|@IWVjnUi?f~iG;mb-Qcj4$u5;K5zqB^D_dOsl{YO!A
zHawQKPUOa{3w*lr$f4=F=O!VrHR(t-9w$Apjuvxp2~r7~TR3cGj`ue$l^q%IAlo%@
z>O{MoI3vl|`#kl=?-uD_C>et4Rf%&(rMJBaQ=pOXS-{p1%06w`siw=@jDS2MiAV#r
zFu|WGuMf>L-Q;~`F2c2*p?P`_K1`9Rt{zB%)O_wR9Co5q--QrtRwHqC?UvwgIjQEO
z%?|#Ety*z}uJCy-*`i8O1XoB-s8J>n3v^vCU>PPjJ+ktwU7ZYzwR$<>q9I7?pyIzS
zNB3*%RaSUD^F-88?w8pJQeOwhlttOIsyS0q;19`>t8y0yPDvTDKBj~VKSTXSc{6eEc0JiOZSzvHkl$hr2kgwhNyh^nxP$W#d|knY?j&9
zH(Tmw2IOJK4+a;(ifRa&g+^;B(8Ri$Km68l6|Om478&q$iJ}Zz8p7mk>)N5HTihNw`loY%Z)i>ge3P6hwJ-6>k;eiRh
z7aJ17hPv)Ksf4o?SN2XXJJxhMmuW#qIH=>J5dnr`B!9O@PBdx2dw7oV@!A~L#p6c>
zzXjWv4l~BPOhWZ;r&12@$`0I*NIffhr4R}uHsIb9aVaJa`I-tfaa-fo5Qmv6W>@oh
zW$3-($XfbmrE3co+sJ@Ccff`9f<`gUw*qd_nItCZslJbKEQ5nURlti@
zG&WRgH9$gze)e-sA*u)@TF*XG$l*BQ(Y8mRG1!wp91%Diz~XTZ)xm-~_n9E#`R)wH
zltz@MQhYM%k%{Tb=KzZCI38Qx4;{CH$_nLN7ZJAM0lR2*Gcu6-lvQ8hodBPb`zjM&
zpKa;WmftLvvdCK0ljd-3*YbkG_glc3zEmXU`gwuF{
z$M@~kJ6|=dh43lO+U0-GZHqtDd%9>GNMZRb8CPR{b}MdyXEj4qHJK)i!3F<8oDZ?Pz4^DIk9ZOsFHGhhZ3xDyhnx3Q?udB-i3BwMm&*lf9`WdW-U
zgqras4~j{`HPtPM#gnEIK`Yl?k2YohnL{39O8;i>)BOP#L43Iij)ECN@ua~-Wle72@Tm|;O003X|z^iDc
z2^Nu)UTja_T3vKen$-s)=ceOU+`dd;Q7KJIYn;Z_K@0%J!R)Ps<3q>o$Pk^&wn-D~
z8uh0q^??-rk%9&bw#j=C6}yAow8UA
zazpAcBrGALpvMXe0;Tu~i^&NI_FUmI{G>kN?vtxWFxM#CH(f>?R8CdP+WKXqx;|&Q
z1X;ux1R^xysj2Ncgf@C@7w%X`%Ue~1TyK^{$ia1+)?7L$kiwnl%z2!&4&g1VU)gkI
z)T6V!i7g{+c>-ryJ3gaad97PJMW1xVg(vNUjD#{_S;VWSS|)EZXN*HLoCvHIVTh}{
z6}9Qv74x+lg=-$S6t@^sOAxUnBB;t%UXT0gY=j_ac-Iyn3tf$`4(uP8QLlva5Jc)#
zKc_Y7v5fjBHR0_2_!_|Oxi){jTJZUIJ2Wu853j&7zhec5$QzM9qOH8;9S`s&)$blo
zEK#MKEsrT<8W35?dQgEOpJk4IryQx8Fq)cozF;lj@W|^zzxw0=#x2RDxqzW83dPq8
zme9^A@IuN+kDfZ|C@7Y99MkrG{1wDUXg8w+JMvMnBqAVBiDED+1D-7&yVjhCFc5YA
zr0+T;qx|Xe!!n|zIos457a?!`3I!PaUmZwf)J(tFs8;$XD6M^wj@K1acIZoLCpoXP
z?KYK2yTae|A^D}(dyk~?ACgWR7dniu0te3g9_qM#bax0;Obpvbk&;|F2D-Txt911n
z*8~45eN147s#RE$CO0~L7!u$u!6V=bS|&FH*B!}n=@{S8F0|nFRvyJHuY#j8?5$&}
z&fZDLyf3lUSccnl1pD3b^WK2&md`QJ1>OG%X80d+*HkD_qQ0
zhRxUZqo)&0eoL&gXtt$hi_fr7%zlz+jDe?+4$opsanm{5L)cbLQ_}2Jizs9l&FwKnwA6?ggQ8g3&&!*ISePf8mIPcpg7)G4{fOQ>N57rt_aM7
z<@8)&R(U#2U`S)P-1I(!glJXEV)_OiBxPNOTn|6xZ?!SDA)l4%PEyG8aTN~_ir)XS
z5e_)?m7o^os~5fcoMKS|o{UQ`>xl~6dP4NcV(JPCI}1tYL}Gu8i&63aFprh;%5&pp
zw!0C@b!sR+*>oci^tp4q4pe*7f)gvoR3UQqT%H0|+Y1hFrCw)-4t-bRISUQ2uD(At$t|gK^e`Qd%lo@h6MIRml;{bQ6NDx=cgHiPmZ`sGYFmc1
zYuX_0afwSlXQLFYdG`P#J*0=B(lNSw;Lk=q|E*-R1H4sY^V(iY8kf_?hmy#dN3%{$
z6qH|iiBF`SN82B5n~@Y*)xMW1m6d&g5Tx
zsrWQXKCO;_Wq+J-xHo8bgYMz0VfR`k=^0vYB
zO2XhsV>#9Z0KjYDUj@e1RGBEG6i900hS3Qd6ox4!)A)9_i!xYmYoO{%?j>mJBEy?J=H#_{Y@
zIQ5BGXyHDhe;8f({F0|K_sbD4(PP=YilcmCOPNz1+KCOMGbQ0{xUu^nmJZ^S?YS6u
zfSr3;4&=DORW3}ti`sco^o7xF-@aWTsa9K8PV`(otb^VB+Lw#k34s%R0V+IYKSNMN
zgI0;SRk~@D9BVOLcznvbEdunakCc
z#CgW%QpeLAO1N@xSoDgarBhzaM_R@|mClOBmJN`REc*#VKv9#)<$9-}~E(f{`i@zeE+KK??Sh4q<`&eOk
ze5190?`iho@eIuV(sIqu_KFmvsN=|#>D8wG!+qia4$mlBpGLVnF)@9ZezIGz-;xGC
z-yga_tUoj!FE};=E_%FX1>!|3{gqw_x;Q$042<3IgB$Hy3U!WroG~VZtt~l`PCJ#+
zmGc(Of%U#mQ4!UT0oq-T59`S?7r(4Of9oX1Ik
z8?8#ZUE-P18=dIdm|DdVl#RL(0i^Xuh{}cs^)nY|<#vm(stm;D2TO5^Q&Nx`v&r
zA7BJ=)g$wKlR5gSJW}`Y+wQvKRpd;{oR0eW0%IERf-OJA|Nhm6_muBCm(GAgIfANAyQb39W>?f`VK
zdl(jK6m%eK6kcf{!P5DIT^yKsUx}2d1H7puJ_GkOQSiu7G5q+is^)uL&*TCV!U1Ci$~OzC&=1(kluIRcL9G
zKyn2DIUn{^)B|cYo`T&?2fU}(H*@gL2?{v{r9;l6A}gWT>Vd7qNE&;A7;3w12V^DH
z`dM3bJ%+G3ojfzm|Ep)q;L&mX(|^LCFa15FKQJnj#L{S9o-yR~GzK+JtRUzjG-#GNR
zcO2{pta#27+Xiy@SKMuT8p#p4b&y9&y#;iM=%UY&ZMfaC6XE08ZZ909v7vhPn7TN+
zB_Mj$Rj;5!4f1!@+eP5-8l`KVn3(mKN`Fu-1S1Xx3Z6|(oit&&&b*}b9SPnrDaAkW
zco>DP=p!(?z!}H6n#}C0RBy(&vxHwIl(ZhWHOxjG6*1Bct@IT%X|W~kXx-nmF)tca
z-L(w1M*U=9Zb|@WSx^F!vC22HdGvI_Z5(1U)0H^k;62>gHwgH8>qNQPz$u{1F&Vrx
zPG~XwimY!$e64*bO*oG43D~(WV$wJ%Ua0$z@Ldw&)YI&eD^S$3>=3JG8#p$|#f!;-
zs|b&%K>LE9)$?8otG{*l8R3idN8Zgvvh_Qd7g+HV6-q&2CDk?~ZFK0rRd`_do2Vf_
zvf?upz7$BRXzJco+*`53eLqbK=Hxgk#J-mBdPPO68&=ZOrg0%RLlg0Z=h*%enkie%
zfAYEKeg(e|XvMQ@T{$!b4=-k@LJXs!cg?#5=0*te3D~K4Fx0LKm}VBjf91jv$Vww2
zQEmafpKq(2%cVkL@p^#K%0?leb+qpPiZxd)UL03l_3=AQmLG`d`e`${*`0oPrjO3Y
zA1_klc0+}AI^3M9uOM$WtmYqC{#2hJ|ge<`+6V@PdNrx3o4
z&54BX&1HiEKl|2x*Sc8Ux|v(uLJya%D`8HHp?CM@l(W{zekZW_2!g5jK~Rx{Gg~c&
z@73^0@hF~X5q`fz^(uOm#Bv{<^ju*B_WPv2r(%>5YTb6A;&!b;v0a9
zdn3<$ImQSrd*y7pBRQio50Q`T`{zVN9YywCl({NXjpA>e@LFh!o~Tj*PtEv;Q3j9S
ze}bnc!3;FKyAirDTvYXpXf1a)!e9aS9-0de=mJ5^Bk=P5H-PpU1^0Lw9(?DrT=#sk
zuGuV%I^?~b2MC@ov7^NL{*k9iMqX>{;b+n0@I&9l<&YGrzI4TTX6G4jK;GD!h_3o@
zLF;|uf%nT(75uz_39$fsLRY!Fe$Sg3gc++8G_On~0U3XB)ve)0@~UWpst)HILd`zE
zK#O7ZZ99C9a&of2F~cURSmBTQZhzlF3D>YW?Xt~L=n8B0pHQwrweL_9!+$9NsKLvU
zk%q(xQ4?m@AP}ClC@rxvbT7|L_HSc;R%31Sb=somN
z!t~~Cn2=N#Tix!mMmx}q&>o9r#{uTYsA^bew6t4ufZpmM0iwpHwboq=N@%gL$bJoD1wf_L=_xC8Y
z6mBPXqWmLgp*~yHB`)FKUc)W
z`SCKwIFn+gX$m5pS5OL=?r^n4oWwQ-Z|;z-**p*BqZTpa31O{iWg*eTh<1yHqv6tjVHL
zdw1ady6TY>?@?dk49d3GHgmcyHxaI;O7zS1xFg;`^YLf
zAB)3JKp~A{r9j%ih#8xaJMd+~qZ{r;8lxavEwvRN`F2G``M8Rw2HL4i^RKxo=~-N$RrxChqOxO5#5^qpG~RtK6%ylklTs&u5YxI}7cRvqz<0Ec#q@X9p}wVE&El
z#DXTTsPtA;{@LZO-bPj)O47ZNT-&9k;V44!=Ms%B>h~WVxS?Bl$yCyU-o+NKdc1pm
zfDQk8k%0tu&@*bDPkM`SJr0p3R;0#hb_Ah;EUBblfEgwKqIVS`9&
zF6#{5XRf1HV4Jujye(heKAMoR!mRcNR{QDku7AS_0}zO6pj=~}#Fmb>c`qv}Kt5-L
zO@RUF>b
zXnm5UH2k%-h18*optUUAVZ&ejyc6SPiKx|}ux|(`n#ITxHDsuM3)2GgD`37JgaKiS
zoG~QMvY3E6;RbcM^&Or8_q6YSBwH+;&rNmHbS_B9#-6Ix$|7^i#2&N~g7K}}4pDPM
z>6}FG^t`eNkdqSu+d%Xd9_lz^C_-<183fvQzoc<-{G0`!;zMb
zkOeVWUF4VRn-Y^uWGn`FT+nIQJ-8$!%O`5n<$f|%`}gxyAU^d-6;#y>E8>X-1p
zl7MaL8T0j82c2vFx;Yi79#MzMUTOtl_eT;h8Xe=^jCGjK|&3nJ;2ANr+#dcGEt^#W)S=b6QR2!`)=rT{x^?T(nm6<(O8!Wsws>
z8Z5B|9Mm$fu-}c$EzVnxHY-ON%F3gz;_~siCEdZG&{VXa!5Zn@+`11=2!tYj1xuX9Z6V)CF9Y`1GN2IW{`{+r~y|X;u)QgLi%!ykc^gy
zCq0Ih=Q*XNAne#lh6AiURK!#!+97F=h&hv&e!H{FI$8?d#rMq;ePrf~sT{qa$U|xc
zmqXbGNRQP6&xfV%4CQbsv=fQ-pk73YFP&VTC;N*hejcC6ffdCgESbWDYPY(Z@@lx2
zK3OQkuOosHL6Sr;+4;RvB>5z3S)uks`A@Vms2`ml8u@r=))TKc?lXO8y2gh_wjp_T
zGYupLx}x@x&SN-!^9PnM*p9u-M1`c-EE*CjMu{W)8sL39#_{cOmLwNL*d-WhZjr=SBezF*F6mogH=M5QZ5b
zK>yJYk}`|a6cgz8^vAWkkp^4RWPn67XF|~D3=OSlsV!%=oPUahyspFaunF`wbu^hR
z?o0o>2Cv2P;X#8=iI-|Fz~H~v^~5*WDOVllov-%;;3qzbB4##1d|L;8VM7@@cVF$#
zJ^c^S>w$S}?~A>8va|C}51Z^>%nuYp+ko{xo`KWvrgGRdx_pApX-c5~!J);}&H
z*#A(zQE__+v&Hz{wmuR&A=_JmEN^yGvx$rt`%i4)RZy6-P*!$Xyf^J}RNuMM`lj>Q
zxe&`DkJ4rCmvmv!4ntbNx%?{5ehz?;MRoB@`bRWTR#O)cdWVaC8b$1fF4PG$72urz
zdKG}`Giia`>)DuPw(C)9T~`#LQ-{H?zLFJ4cdq|uMo3?ZKH~tLV4V60+FuHT(Lbt{
z6}#^kI3tp3obSt}Ei_DOKQ9egaeV;yPlQZfiw?F%BfX%6EdHQ9yqzvERd6s%>QXym
zH67mEXzP{Fe%y3DVlA^f*s*k6l?1{fG*-zY^CG~8;;Snv2fFb>lvGA
zwtv;25It75z8Vi`G`FE@CM`dCrXo-pNWQBt7ymk^MdXq~)*_%W&H9s97sH
z`}mB>lHV_srZ+J)XCKu#?~+EotAKI@Sex0wvoV8`t#U?Dd!YcG)!Eu5!tn252qux;
zo#-Am4=|dTfzbHKru7??6DAC7nu+4bpX~76Tk&-$qTw_V8PG3uVE4l5uHjkS!n}U~GIZb<9;j;yO3ww%qWd$t#RoeZTz)6S
zZ$!U)xeynZc_L%VZnwO*7*mXRa8WchQ$fSPc&nB+LL-mPct&!JGXJMvXy)B{T1wc3
zR*?*lbA!D_e03Zdw@$r)XEbgqOu6wE-M+e&E}-^j??4Ba1du5_%-Rm5eG)9X!YGGAJmN$o`{Wu}u33dW?W?AgbT$PU^@T2X9D96Wj8h%ixk8
z*2PCbIdf}-7eGlnKzGe&*gU4+8k_go92ZR0(6w(PwFH^pHrBL)JABkML)Eucgjrsa
z=V^GJ3z&1i>=lU;NszbvNn;%wX0b-xX%qb?Li3NkT&L9i#bxQ3B7iD4`NoC6Frq*B
zpHc;dHoH~(8W@t{>n{mX1gx?Y|EYdd1l-j8Ea8V}p@CcjYgIE^wJC?fx3K{8&b8|{
zn0jpPXmnB`i_XP(vONkZ+8b^@w*wI2f2t`NbEBls_G~7}?%ssMB>a6enp(niS~UkrmTZwM|(G
zI&=qCKy#}gFsO{zGrL<4H9~hUF3eyuJ7U7B*AT&efigbkJ>ggdlXt
zR$x>9`2=3`8N6lBmgD&uD^}M_`aRVc9_Z{DNAYr_c4_j)!V?J39rE}4_7!Cm$vyy?
zj&gw>hQ&80(v>p{40pAHz{&TTS!EUyk@5ctFVBUEeu{Ac?`@
z%RzXS2%JZs@IE>`dcis!THs-~?+RuWXG0aYpNJY7bHSaMYE#*`y(2;x93{3;_L1m&
z-%&{CrssjOk!Zc$=TDFc0Hm!unW6wDzm4Tog}Mj#4xxcl#&o-+DidPf*oC3r401Sy
zn6ZC_EC|$>eyY$@3-`Zo;wZx{hF>IWU(7#q`K#9cgz~)mOrxvSwVux-{CP(5AOrTG
z{Q$W?I=3D2?E8VcWY17=}E#TL$2n2h{-1O$B4-!>QajFiF(LbdBUQs!S^
zQPZ75fCG5*&0#1ka=4);W_adO?Jj|f>{_oFJzEjxS7-x`=BqiuofZwuFb>y$&kJ#R
z-yoe4SH$kFJs5puMx_TQ;qE;$Mw&cpnzx+o3x_=AzNXVS+m4|k4*SV=P!D|_nW?@G
zRvAEbztApH!H1sIK35)R64}$bf9m}ClGuvQZWze%gY9%99>_M(5$<)jT7I47z~N8Q
z2@VOpP`JC3#In)0(4wZ)Y;LLH;XmnSAW=ZDG!h{Ns7^u7V{R^@M(q@GD^=cWPC(<*
zK8cW0gj;@K?&=?@VuiA(-~g7=0+Z??5gueoaGof;`HrED7(T{sh(rriJdZE51?$VCbC+ph%^r6s_D`O|7$bDR5Etz=?blkftB5A$dqeK@
zYSl45J-o?!KFp
zbs{{LN~aJ9V__EOal0(X@Xbs=gy>X3@vE*gitu+h^>5Opwn#eByM%X;gxG`0L6rC=U_LE5-m%^tFC0Z8+>c
zK%pDG-dH@}_Zr~yslIpD|MPPT7V!QcUS9;SiZ+bu@JrXXujEZM?-@N$B6lRX
zH1Dwn)#oM^q21l1^fay=KihUi|wOwZWEY(9^?$Ck!z$IOA0HQ
z!W87%9XQk|RGAO~VO}K(MJ+Vdg+6*42r7UIKid{AJxURhDP0AhGGWX@CZZh1H8*@2$W?|r8SCyxxNobM(D
zs{!!!lN>OC_(=mjg`GA)BowrA#<8}M)47(;Yyf^SEB#2IYt1PBsM&DG+#Wzla?ZZ@
zUkcuM*UAr)`9W~L6;9FR5*<+VA~QQm=?pS-VO9@p*nKLfaJk!K%AQV`2Hwy1!-iI`
zzH#~;HXwNsnV#oYQ+j&|>wqK2tN>sKg#87ib@GDGwxadgK;maBOwVn1l4$f@ruPq2
z6*tU;mzz+1V!c%SkSbD4jK{2t-=Ve0X+R))5jF4HD!Olfs8$B~iS>Mt3%;o^HQSL*
z$pOW<5PUzUbEp{earbRKL21!o`1Tg^iESAAo8PEw(t5bqhI)!hgH;?O#j@n^y`E
zIH4|dU?w;B{{*=pFp8v1C>k4Qx*}@Q5q)LL)qz0mNlors?n+a8L
zQRx=I&{m~)L|TB*EQ_M_ST0Df3W5R(#s#Da2!tXkC&p&hi
z`6n}HPWirx?v^6%LBq>OHGMcSVE4BNJtu54muxc9iAx&a@xZgwqf&Hok<)w5cs}6n-dy5*2wud^dg$PeQl`$C9BwSe&
zc;2BT^>m1%tA>AQB>T`8I53UefuRk^+jsJ{Rf=upKjaWkGHpq*wQV+QuBW~@N|Y?e
za=+*EzHc;@KRYzmo5rWs`7Is1>yuNN8>59-cz@a1sI&GpI%9`E=2(_8InWVKgn?!I
zWcw4ljvsNtUSi0Q1an;%_#7V;6b3f4SR^`(yb4_dndUv-w?`n$bIjEi#Oe*C@2Hzl
zt!;fp`q-&g3oP;emPI=T!e674n
z<%S;BNP(rNZ3{K3KGO(qH4u*1cN|di&GCIcy54`du-JcYzhR=P<;~IHW0P`K6m-m5
z$O!IVf{Ic+H{*n?P1;!?JsG1Q;20+_UNZ2U_0ZSavgvN%(-1cd}_5l*wnyfgvwSCX5?2_M^I20XK+PA
z1p?u(eCpsM31=DX4)Sm_
zzBFCU`w1pNiS&j}4pA~zws>H)`fS-N+vSo?Y@Rf8ASJ1+O-evkN>a)|zo)&x^cjYE
z1abqW<}G7hTfX8$Q&brIX*PRNeCHLC_^y9Aqa#(*!WYGibHx8!a;GCBbRt
zrAPTeK5==FT;CFBX1E+3Zs&p#qnhNdmyV?=)zmt*=p6lnZi99jd}N^+z+t)87AJ8F
zb6q3``XN&OhS<`{Bo7yH1bhf$_w8*#yFIen=wA^!J>|ZeBB;vrN)?~eV)gOfr)|7y
z?5@N{iPXa{ZVSJNUF=)AndV2HPDid6c3r7$2w_vo8pL3FR
zc!VDZr_q;^f(0Qy5!B-@n5vjRX;Mad_p8?GofC4&0xCjL8vz5heVrnTY?xWmNJ&>p
zD76FkKtt>g_yuDV(^UrmU)0M+mT{m4wFk_S8xyzb%#JxZY+{WKgdSW^o*ACY)1!l5
zjJfly@sS6_;h2mYBsO&DK-`*O6yPAu87D!aG5S56tv?|8@cLViSI@S7wq;$|&h4zs
zUAE^rM!c5&gK^Wb^t8vgNg@8+=Pm>yR#cR|gcl-e#fHmlU~uAh?lv(kthobbQiWi`
zy<{{oNlAKs$jNG+XzMH8~bf1mXxgC{avw>G#Qo2GqyEcx;
zD=W9QOvVYtw#Snh9x}zxC{>RgC6s|l2_dN
zyPNl0O*V|F%ei1WwC>`=*t<4Ll{vAL*FP4Z9x{Wo1|hm8RoL3NP9#6zFYe!6I{G5`1U^K}<)ssV?A;F=S5wq#Y2F=e$zY9hvGX1Wn`#VHB2zTJG%bo6n%k3c
zY~*+Mz)#!BlAveW*Oh#3kBiwSBGm_TY(~B
zgH=T{k{s?WSF@7ysaG**yV+=fk|M0R2uZ0Y>g-dX1J>Q;1g)yOSoOwMnz!#8LnoB^c&@;(v9p8`Yfo(2BJ;Q4PcW&M5p&2n>)
zAjHl*C5QjL=$lR-#=BF%rZdMqtPL3~>e{WT9~!B2{nI+vC;EVNgu7Pax;N~EDS#U5
zFEn%Mx1nEEgE-1%9*^&7RycNh)XL1Ou}IpRVF`BYC8@0?YX_WGNR!Ol@zFF8BEu=}
z%ZE8h*!bBxYIcX){t>FUaQT#?k)oU8R4lKKFK
zY8~vWv6PN>-0dFtXZYw-0D4~stEB_RQ?h;2l*Lc;t<2@tWZV?UGUFr@?gii
zUMR5mTfzVyBM@_kerd!2OQRVud9S{Ny>7Hd!2d%yGX0}p1^BevvB@W3KXlU@0TIFT
z=j(ombosyrt|+sTq77iL5fq60xaecr6SafwY!@BFwvNnnbcXxMpcXtgA0fGm2Xf(kI?(QxDf@^RmxVyVU*nY
z@89lcHa(d=$Cx#0)EM=?RR!Uy$}&js`0!w0U`TSZlImb!P(Uy+@N`%h(3N=x7XMXX~rWSp~w%!gau@7b?KzNukElFT%=^
z=Jmy*N}e*0(l@)KfxAg*OBuK_1yo}B=tBAU(?*VrwCeoGyQYSock6msf0%d}@9>|<
zyvd1gd6)pp)fi2Xzfm;=yQT`R`!=LHzx&e_yNuwID2{3%gX8Aap1L~#fEd6IcyDWO
z0Y}|?Esu;p$+>ygx`h(wBLSl%-c}6E%t5&V2fI}7)cgnwHidIsWr-!9B6bB8`jNgd+D_8M5MwO}?Q0oWOKsU_A`Jz2~>Q`j0ML@g8;5qqB)08ItV
z3A4}a`o@x;?I$F1n(^i5_wO7o-x3B#iGLmIX^JCbj23ydFTGm|yfy6}(1n;S{u09&
z&PPw=>g`+`$-=AZ;|zPIOsjQ5J&|}(W>j60h*Fn|DE-_ZWHXZD@@Wzb!;}Y{g%RIW
zBaDL5tCkJl*CLrW2!YLj%&N9IHly&$kUTMz#)j9{oC9M!#3?zdcOJ-n{kgnZ{#hep
z^>bs$d$Q~h(Z0VSxxBpAL6ll#A6+e~t;AU30kw369cuwc4k3p@q$nr+o)W7REQj?iF{1Ezh=3(-pxxOXV`<#^
zil*YcO=A&$=&m88uYaGuss!h~#RVJ4=yIKB+1jZ^51=3d1Xs#d^(Ns!yzn1&(~RJz
zLfUSEom_#flY)H*jrbj{%m%NLj_!ymUYBvFXt6%G((OANu!wJ0Ux$G($Vvf@nt
zrYuoE1@RM#Nh+m4%ZjL#z)oBx7ia3k3g-!ts2i*}n2k+g4Em;@SBEe;+T4Wwi}1&2
zEmJogih20=g9;s-`OuERa9&8uEoMh(nXO@75|-|RozY7h0L}WhoSvF(lqEah`?Z>Uh?W);veJNAP8#x6zeSjif%Y6hlX!;k?*d;@cwuiGbjj
zZ-TN@MT_b$bWWJy=G^Ac<^)Aa&S}#Uc!kGmXJvu2ls3q2Xblm|vO-E7GQLGcMdn3&
zC0ud}l?|#D^0qbed{v=(7uHXlH29(1NyWKLC1<%rC2PeU`7=lEx2WFy9kHF6Mk*q5
z{-XXR~7#%aK{&a4({>cy5oI65D
zp4aB6bU;m8x`PYf4%3v9lb&bU%ysSXE88gVD(`}4l2iSaFUD%$@aOnp#snqm5~U|;
zf+%F%aS}se?~Ox&;?lAN>!2gTnX2Lx#Sdd+33-y;}T;oV+-jdTo(w#J3L#FlDI$D()m3|5uqqGXad)TAyqhbs`6`w2mKl)Uj>uXf`R@GN7
z=r!p+tmrH%{!Fu-`Q$Q~Yu5$MMwB?@a9_jvp@DTYI
zGBmOq@&aLabZ2x+Gy#!7rYa2!O`OK}8FS0or4`-N_Nf*ZM3blu+~L$#HXcJO<}WGa
zDKIIa)rt$a3;osO)sutEQA#9jHm6IqIcx2!*Q<>!Yi8%hJwRpm88^Nb@N0sEPTm5~5Nysbmbf;kjYn
zdGG@3m*QJAn0g^<^kClQnFgqs%Cn`($;8jr%KxMmr4hwg
z>_*P-)YHd>anZj#Aa9znBTC3)ueAE=`3wvK8dIau^sAJo-SYktGO_Pjl9b+#>N5;?
zh!;=zNV%ruF8h(Pspzm!QK?MDD+52dmA}FEdU5!M+E|f84x^-@FkxC_x~T+35iY0B
zUVc)urp#Q!L9ssPqu|+#Yr>J&Ow8Etv|kyuB413})?*^$_1J^69yW$e)o
zp%N#N`A`?P#qogrIDb{Di`iYj-TUgqLA}eGiHpHdXZh7`w+YF7*}SeUV*?t{db})^kAd-SG!lK!%rTGzHFktMy<@0b+(Mh70-O%nE{1BnU5Y9lw;7|!~)Zo&v
zejGoyW=madtv9Nt-+6!6igz-h9bK{7p51Cx+gU+hTyFk#JS2%g_+w;w&BC|&(=x6q
zL%nDBf=@e1N@N-MJQ;S?i!WOru27m9CGD-t>nkc&n%xj1J=I7Cd)6k?C!E(|Y_UO1!i4_`kg`aTbi-2^$3sT>37D
zA_`^;iey5glL!fTnT6Ip$e$%WR-zi)XlQcU-EF4J?q{Y-CbPnMU9K6QPitKQ50G;n
zMEnnfE`s+X*KsnKy$tz*6$>>VRytNPn4b;ok1kKF&ImWWZsjkNFtmHCvs(r_+^@V(
z?feiI_Ma&>j&XJPU1@L=}fV0Lh}WMSju<6~iEXJKb&0^Pyn;$`n@;>l$1Lh-Lo
z{;MBJa~CsbYe!dW2Yb>#{hF9MxVZ|ElmB_pf4}}Er@5!~|2)aw<=?{s4Upx}H!N(-
ztStZCH>j!LpR4?;)}H2HbR@0q%
z+c*DiDai6?1b-XRztr{DRgk`f;RRX#Tld27FwuCypy?pAmQ>LMeM0=18_;VU^rrpy
zC+IwI=lg|KF&LOAn4F}TrYHDm)(1~bDVz`$ZwFCvSRwF!Wb%AiokIwE`5Ji)D5xVM
zD9j@*RPZCE$^dny+)`L__#g;4CRf<9LYjA>6O@qH&j;u0t^}2N%
z6PY{l40yjp!Jz)PLs7098X|{o0ZHsD7;M1*?-39T1|{5%`oHxc+T905dYR^Vh06cG
zbN~h`y4h>{e~>{D3?e5S2PN-+72%HnF#i$YKN|Rt2L7Xg|7hSp8u-7dfo@g^V`X`E
z^MUN*%FzQl8oD^=8I9ro-UI^xU|8rUW{XT>l3z#$E|++U0x1nsQXR(5p8HH;N5$>3
zD*|10I1BGovgmxnXktb}LDdXIdyDsDtiGxvxCh~dy1O``d4G1?`a6s2#Q5iMz>EmINIbqJkCxoG&mj)f-!4hKw`xIwhm-9sF=nIn-QzQf>gh@S
znDc^AkAwgV
zh>dX)54D%+GEhXr68@4YcORL2!PN)|@;us)
z=QmwVsIHUQxQ~vHHmRqDip)LsO_UxvkwEX2XiDk5!aGR-X;*n)(*`)QQOsN`=eNK8~+t60Clm
ze&fq;F>XN~WzkW{HfI<)3%Q4(jCJ3}%_3NwLyZeYAyfdL+^33tY%KL>&Sy)qGx=ZjUQEn)Glw{G;876SN{F^z~~K5$Njp4PkG+S
z5Ek%Acl@wAH<<%C_x}bUu2$|u5t28;Vvx9;ap2p;``be8mW0y80IEN{i#pX09EJ28
zls!#5RmHVFhMKLw6}~)C7^d>}-Gg(U9=*@HXxab;@V&n=G{71F95AUaUO!fC1rP3-
zMPX@w!M1nk3o)P)a_U826GyO`OM=jM#k@xeRKZQ!N8;NHacCGEZhA`CeXvs6o`q%o
z$7F#8w!xK>Uz%?;E2o5ZO1IDrd-%*=%7;;2KFW)ej2S?A;$U4Avwv-YhSu0EuZatt
zM2s}28c3ZO58G(F<$hm|w-Ov{&v%j@6LEF>LgEH*6UH2^=}B>W=G8OjMs^t8NC$9WH>#f`sGJg{zk3&O|df|jgcLxy^J4<%SjFA2^TadG%gioi`uW@|_p4x!su+UM-2kK4@_nkT^#jdfdR22!cmVFWC{TrPLSOg@g!O=w(*e)_>5p
zfE`p-gUwN1-=OMfz#8i}2%}@!z^dVx?qy6<
z3T*cDj5QwT$JFVQT;A+-0qWZ+i15@)268mrfZF46`Gm(~&K^R;e*r=`2`k|KW4GZ&
z0GER9Jne&Oy$#V+eF^eGrtY%L=FwDyE&c?dy`Ccux6(-qEEcPAMh6aYWrj}9
z<)V=@+CojVPi3=DWYx_p-aU852uc^xjBBjL;3Kmu6F>7jWaEEbVY3Lox*0mj{t!V3
z4q+VUspvJ+zn1l4E2Qv?f6eu(uD#un!hLLt$+lH(u{KSm5_;&h_+Ja^|R;FQQTm8#!1>0wB
z2$FW~)K?4#8Y7L9h&ZXnz1Wy9HeKX9LEYMv#4{VWUb*zaKTjq7)TxZ(oge-g3+P6J
zqouzz%;5`}Mn2^trs463&}mIG5bTJ;As}y6B0U+Q;-9PPKRdASMqTy=gYH@!P$Z+6
zf5(u0ypw5VbqXW%Ow8sD1yl&zhw1FXIC(kZjI=r`+w;t=v<0UWU#3+=+(HJ{$OyDy81Hw3O}!EQG@W8Yu)(ua%1X#kr*X!WW-v;m-0FRol(e
zKF`+K6Z3|UCZE;I;2aPFs}&y%?AE5m4{iS?@FeegCbRyqdJ%imTljYnl9>=-3SkU_
zX@hxPWHX9f(A=N5XV^ko#)J`)`VL@x)UAlVs!c(mN
zryn~L7#x2G6^{525paN`!I<9-R}T2tneR7Mw54VafACG1vYWNCuG4~IWKcgIw)Lld
zve=9?xhcC74SVU3bFmLR4pN{sef-z+0q{#L7kCBN1_fS{8SLKZ$$r}z}_3sO_=
zr)^9G@JUW*V2f$c|2P>ih-f8Iwm=pTiadP!85dHt_w0%R@3n^O)rg27H`onY1jQT<
zKWEq=o(*r}YGSd^&PZS>tfWcr7IQEKZr#RTu=Z#Hhm!E68eOH-Ass2VbJFq#^}l~y
zK{uYrfse0X)bO`Iz;7RCv(e;!eES;QdgYv%WsG_7k4H(^p>C
zeDw1UKTURA?z^|Up1xK4vG&g5@;dqS>Fk`FvU3?E1$A8=*|avk_$EoG@O_A?GzD0>8dqlN-Twdw69>=lY8{7-Wf7F
zbmc#8IXZfOEOl*pTsHV;L8N0D?csw!rNTul(c@@({Ht`VzN}MdCJ8n4-;94V6$%pr
zI34p&wYPa(nx4dfq&C$KsP9Yhykp_u$R?x
zHEuC;q!1FrGNh;)$f_;OdC8dQyj#w~QF^NEv-PFh-9x7SzTVQqt-e0^gC|5?H37TW
z5KME5>uj3LkpU_w+h2;m@tL~C(_7CveC6*JM4HP2nm*y|f&K;Ke8R8iJ@jlA*j{T7
zVVys>Iz%COg!LsBlE+ytvzM5kwEW}A2PQf77nSiGdhS_{MqVBe*&dv4J|<`f(@ON2
zDxfZfU%8eoj5fN?6{-MxREC&Jb@DnLC7V<^o5|2F%XQLvLY2rTr}w%c{|lUmnn2*;
z;c`H12YmFXD$Ed&Vq6Py*t~!+i$XXU
z?2_W~f9}WqwTKs75yK)?y&}eJZ|@%c?c4^VS)
zT5&Lj1ji;)nshS?3KI*c?rv8MjIqFrE?D?FSJpRV9TJHEE_d`$$4*ylNHHR(;D$?R
zUcy3s;X~q@`pA1Msr>LoDh1U|rHow9N`r
zk0xZ7dnF+U!Dz77Q-Kj4N8eeT;32=A{CnO=6F||`wC>-8P!XiLrQIo6iJ~d%R&t41
zhTjy<9H1Wv#r-BZr`Fj7^{8T>7RVxq)>muCQCLL+S
zN$uCvTRYvuWi@7gUR8ZekXK2yOZUuT`@NgfSrC~ZgKv6lPR&A^^PvKd9NXt>GkZRb
zc{ZQ*1#85e2MdlA)rS5TH$hmyN3=XKVTjC_n%PwAq0y9>$aEF$8Y3m&=XcrZi=P3!+7t(0Tro$-#~TP|#-rZ!
zALzU!*A?aQ4R}Ox^i;$8nR8yi2v*naF0mZ>eEY1a%d|M
znDOCZ>d^`E3rTmkP^5>q;Yh`Oc_4-B`tLWaWCHmf=j3x#alM}<=!TKZV0k{V?2toY
ze9Ykh2h_dYq0_QYF5234yX%qVjeK6&36woOnAD^l>+<(R%7IAf41%m7qYB-M5XS4x7hUgu+dKVp<$Tig
zA?+@`W`C5e8U$KwRJ5jusKAr}dI%DfEbmV&{o{lM1eQ7cNM$+~TYpH@2~;(ce~vw%
zb=g}2qw%sz@2k8@rftsh=0MN+VQ>;x}{Qkpi4#(V=qqFdIx;p@ZIdro~SY<*xrVjI)XL4C~C_9UdNDXDJ(
zy3oSe8KI#clu}s(Ly(E&tK&%#{{=~aF-g?>d`ku!{Av&CU?h|p{LYEUS0Y%)NO+Vn=z+9@q^b;OM%hZ<&Z
zht(oi21PgyEM-2}R(Rd>886w_xUq)!a;f}1WibAgqgBFst*#J$CJkpoZNFbxZu%2*zu65*L<|&A>Rtj_
zjLW??*nSNqxE8c9GUuOAN}X*&JR1&Qvd_`Ars>!hX9P*-9dfZF00#|Uc)kGV`~z;z
z2;xY4Q!IeEw7d=nktGF;1mR$3%bP%Zi}C#zOh>QkxX8hLyRaQ*5s7eaddv%?kC0AQ
z1d5jo`n=H1MvsdRoa{D9q@Oj-d$H>g$-$4^9MsqAz@%@_Ka9T89X_VqtaWSn!0`rk
z4n^X0R*0q>t4OcGtpEqczcm?d*^wape8&=_?2vSnaG1
z85tN9^$_gP<=R71$XK^75=w2FU1qC0C3pBdOq<0BS+u%|cK`7nxt!pz0C|jl&k03Q
zjKCQ1jh7>W2xImHXD$Bi9qo5$_+uc9laxT}6`X+~=hy8ffa#0qS=jG`tz*sotVj2#
zfNFUR*%&@C%V&>|-Z~ir43GP?baeh48BghD7$gfy>+RX-B1YvC(S~$6TAwPaci+VY
z2uJ6hgUPDg29`8jZ+_QIcfBTh?|PK1Z4HXM^a0a$pJAa0i;R+^DKFU)hK6MMJugY2
zp||?N)oakAAR$OUDTCBJ)?y#FoC?K>0rY&*)n-=j8F@@}zqIVPY7UlCbxi5pH9Ujs
zALtW3#^Ak?_8H;zkoT40_x@DNXS1dpHo;#l=C^QV>?^J)~%Qe!mx9bA>kT
zq#wyl`qKm736gN(&Hk|e0BexNbNoSPkm$cPH`}=0e^Pv)dmQCTd|S9^Vjg}w!p2vA
zn`|QQI!Gtqi2vgKNb@*|A?P2&JPlm`hy*O>7$l8p~dQy39#|;5K
z8U71EX*!u>&1{MtY<+xi&@G{ezAepCdHHm?0D$?(;~*ofA{=Sx!>uvfY-S%#c~4YW
zc+seSy?&8tVVNUS#;)l;pze$ydW`C0?BopHiN|5_yE_o_%*{vpeRfup9{^;v+vv