diff --git a/.gitignore b/.gitignore
index 145bddd88..a35d52c42 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,7 +9,7 @@ examples/pid/*data/
 results/
 z_docstring.py
 TODOs.md
-#
+#
 hpo_study*/
 hp_study*/
 comparisons/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 13075c111..15bc82105 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,7 +9,7 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-ast
      - id: check-yaml
diff --git a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/gp_mpc_quadrotor_2D_150.yaml b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/gp_mpc_quadrotor_2D_150.yaml
index abe667855..7c3748b2b 100644
--- a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/gp_mpc_quadrotor_2D_150.yaml
+++ b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/gp_mpc_quadrotor_2D_150.yaml
@@ -48,4 +48,4 @@ rand_data_selection: false
   terminate_train_on_done: True
   terminate_test_on_done: True
-  parallel: True
\ No newline at end of file
+  parallel: True
diff --git a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
index 4827241f5..a777063d5 100644
--- a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
+++ b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
@@ -70,4 +70,4 @@ task_config:
     init_z_dot: 0.0
     init_theta: 0.0
     init_theta_dot: 0.0
-    verbose: false
\ No newline at end of file
+    verbose: false
diff --git a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/gp_mpc_quadrotor_2D_attitude_150.yaml b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/gp_mpc_quadrotor_2D_attitude_150.yaml
index 20d4f3268..a1a89cc0f 100644
--- a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/gp_mpc_quadrotor_2D_attitude_150.yaml
+++ b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/gp_mpc_quadrotor_2D_attitude_150.yaml
@@ -48,4 +48,4 @@ rand_data_selection: false
   terminate_train_on_done: True
   terminate_test_on_done: True
-  parallel: True
\ No newline at end of file
+  parallel: True
diff --git a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/quadrotor_2D_attitude_track.yaml b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/quadrotor_2D_attitude_track.yaml
index ae3ca80de..e1561e2d0 100644
--- a/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/quadrotor_2D_attitude_track.yaml
+++ b/examples/hpo/gp_mpc/config_overrides/quadrotor_2D_attitude/quadrotor_2D_attitude_track.yaml
@@ -63,4 +63,4 @@ task_config:
     init_z: 1
     init_z_dot: 0.0
     init_theta: 0.0
-    verbose: false
\ No newline at end of file
+    verbose: false
diff --git a/examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh b/examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh
index b67cffc66..cacd80dd0 100644
--- a/examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh
+++ b/examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh
@@ -45,7 +45,7 @@ done
 done
 # 20 training unseen seeds that are unseen during hpo (hpo only saw seeds in [0, 10000])
-seeds=(22403 84244 98825 40417 58454 47838 56715 77833 19880 59009
+seeds=(22403 84244 98825 40417 58454 47838 56715 77833 19880 59009
        47722 81354 63825 13296 10779 98122 86221 89144 35192 24759)
 for seed in "${seeds[@]}"; do
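The evaluation script above hard-codes 20 seeds that, per its comment, must stay outside the [0, 10000] range the HPO runs sampled from (the rl evaluation script further down reuses the same list). A minimal Python sketch of that sanity check, purely illustrative and not part of the change set:

# Illustrative check (not part of the diff): evaluation seeds must not
# overlap the [0, 10000] range that HPO sampled its seeds from.
eval_seeds = [22403, 84244, 98825, 40417, 58454, 47838, 56715, 77833, 19880, 59009,
              47722, 81354, 63825, 13296, 10779, 98122, 86221, 89144, 35192, 24759]

assert len(eval_seeds) == len(set(eval_seeds)) == 20
assert all(seed > 10000 for seed in eval_seeds), 'seed overlaps the HPO seed range'
print('all evaluation seeds are unseen during HPO')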
diff --git a/examples/hpo/gp_mpc/gp_mpc_hpo.sh b/examples/hpo/gp_mpc/gp_mpc_hpo.sh
index de8541b87..951c664ee 100644
--- a/examples/hpo/gp_mpc/gp_mpc_hpo.sh
+++ b/examples/hpo/gp_mpc/gp_mpc_hpo.sh
@@ -6,7 +6,7 @@
 # 2. Remove or backup the database if needed.
 # 3. Create a screen session `screen`, and detach it `Ctrl+a d`.
 # 4. Run this script by giving experiment name as the first arg. and the seed as the second.
-# 5. If you want to kill them, run `pkill -f "python ./experiments/comparisons/gpmpc/gpmpc_experiment.py"`.
+# 5. If you want to kill them, run `pkill -f "python ./experiments/comparisons/gpmpc/gpmpc_experiment.py"`.
 #####################
 cd ~/safe-control-gym
@@ -117,4 +117,4 @@ echo "backing up the database"
 mysqldump --no-tablespaces -u optuna gp_mpc_hpo > gp_mpc_hpo.sql
 mv gp_mpc_hpo.sql ./examples/hpo/gp_mpc/hpo_study_${sampler}_${sys}/run${experiment_name}/gp_mpc_hpo.sql
 # remove the database
-python ./safe_control_gym/hyperparameters/database.py --func drop --tag gp_mpc_hpo
\ No newline at end of file
+python ./safe_control_gym/hyperparameters/database.py --func drop --tag gp_mpc_hpo
diff --git a/examples/hpo/gp_mpc/main.sh b/examples/hpo/gp_mpc/main.sh
index c04997bcf..632454d29 100644
--- a/examples/hpo/gp_mpc/main.sh
+++ b/examples/hpo/gp_mpc/main.sh
@@ -18,4 +18,4 @@ bash examples/hpo/gp_mpc/gp_mpc_hpo.sh ${run} $((run)) ${sampler} ${localOrHost}
 done
 # TODO: eval
-bash examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh ${localOrHost} ${sys} ${task} ${sampler}
\ No newline at end of file
+bash examples/hpo/gp_mpc/gp_mpc_hp_evaluation.sh ${localOrHost} ${sys} ${task} ${sampler}
diff --git a/examples/hpo/hpo_experiment.py b/examples/hpo/hpo_experiment.py
index 544c41d79..07032ea1d 100644
--- a/examples/hpo/hpo_experiment.py
+++ b/examples/hpo/hpo_experiment.py
@@ -4,15 +4,13 @@
 import os
 from functools import partial
 
-import yaml
-
 import matplotlib.pyplot as plt
 import numpy as np
+import yaml
 
 from safe_control_gym.envs.benchmark_env import Environment, Task
-
-from safe_control_gym.hyperparameters.hpo import HPO
 from safe_control_gym.experiments.base_experiment import BaseExperiment
+from safe_control_gym.hyperparameters.hpo import HPO
 from safe_control_gym.utils.configuration import ConfigFactory
 from safe_control_gym.utils.registration import make
 from safe_control_gym.utils.utils import set_device_from_config, set_dir_from_config, set_seed_from_config
@@ -123,7 +121,7 @@ def train(config):
     graph1_2 = 9
     graph3_1 = 0
     graph3_2 = 4
-
+
     if config.task_config.quad_type != 4:
         _, ax = plt.subplots()
         ax.plot(results['obs'][0][:, graph1_1], results['obs'][0][:, graph1_2], 'r--', label='Agent Trajectory')
@@ -176,7 +174,7 @@ def train(config):
     with open(os.path.join(config.output_dir, 'metrics.pkl'), 'wb') as f:
         import pickle
         pickle.dump(metrics, f)
-
+
     return eval_env.X_GOAL, results, metrics
diff --git a/examples/hpo/rl/main.sh b/examples/hpo/rl/main.sh
index f286da8cd..4a58c5cbe 100644
--- a/examples/hpo/rl/main.sh
+++ b/examples/hpo/rl/main.sh
@@ -19,4 +19,4 @@ bash examples/hpo/rl/rl_hpo.sh ${run} $((run+6)) ${sampler} ${localOrHost} ${sys
 done
 # eval
-bash examples/hpo/rl/rl_hp_evaluation.sh ${localOrHost} ${algo} ${sys} ${task} ${sampler}
\ No newline at end of file
+bash examples/hpo/rl/rl_hp_evaluation.sh ${localOrHost} ${algo} ${sys} ${task} ${sampler}
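The hpo_experiment.py hunk above keeps the metrics pickling in place (metrics.pkl written under config.output_dir). A small sketch for reading that file back afterwards; the directory name here is hypothetical and the structure of the metrics object is whatever the experiment stored:

import os
import pickle

output_dir = './examples/hpo/results'  # hypothetical: whatever --output_dir the run used

with open(os.path.join(output_dir, 'metrics.pkl'), 'rb') as f:
    metrics = pickle.load(f)

# Inspect whatever was stored; the keys depend on what the experiment computed.
if isinstance(metrics, dict):
    for key, value in metrics.items():
        print(key, type(value))
else:
    print(type(metrics), metrics)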
diff --git a/examples/hpo/rl/quadrotor_2D.sh b/examples/hpo/rl/quadrotor_2D.sh
index 16ab117e7..836b35172 100644
--- a/examples/hpo/rl/quadrotor_2D.sh
+++ b/examples/hpo/rl/quadrotor_2D.sh
@@ -30,4 +30,4 @@ python ./examples/hpo/hpo_experiment.py \
 --task quadrotor \
 --overrides ./examples/hpo/rl/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml \
 ./examples/hpo/rl/sac/config_overrides/quadrotor_2D/sac_quadrotor_2D_.yaml \
---output_dir ./examples/hpo/results --n_episodes 10 --tag 2D --opt_hps '' --seed 6 --use_gpu True
\ No newline at end of file
+--output_dir ./examples/hpo/results --n_episodes 10 --tag 2D --opt_hps '' --seed 6 --use_gpu True
diff --git a/examples/hpo/rl/rl_hp_evaluation.sh b/examples/hpo/rl/rl_hp_evaluation.sh
index f823322ea..75ea8f6ff 100644
--- a/examples/hpo/rl/rl_hp_evaluation.sh
+++ b/examples/hpo/rl/rl_hp_evaluation.sh
@@ -45,7 +45,7 @@ done
 done
 # 20 training unseen seeds that are unseen during hpo (hpo only saw seeds in [0, 10000])
-seeds=(22403 84244 98825 40417 58454 47838 56715 77833 19880 59009
+seeds=(22403 84244 98825 40417 58454 47838 56715 77833 19880 59009
        47722 81354 63825 13296 10779 98122 86221 89144 35192 24759)
 for seed in "${seeds[@]}"; do
diff --git a/examples/hpo/rl/rl_hpo.sh b/examples/hpo/rl/rl_hpo.sh
index c944d416d..ce5465a18 100644
--- a/examples/hpo/rl/rl_hpo.sh
+++ b/examples/hpo/rl/rl_hpo.sh
@@ -6,7 +6,7 @@
 # 2. Remove or backup the database if needed.
 # 3. Create a screen session `screen`, and detach it `Ctrl+a d`.
 # 4. Run this script by giving experiment name as the first arg. and the seed as the second.
-# 5. If you want to kill them, run `pkill -f "python ./experiments/comparisons/gpmpc/gpmpc_experiment.py"`.
+# 5. If you want to kill them, run `pkill -f "python ./experiments/comparisons/gpmpc/gpmpc_experiment.py"`.
 #####################
 cd ~/safe-control-gym
@@ -118,4 +118,4 @@ echo "backing up the database"
 mysqldump --no-tablespaces -u optuna ${algo}_hpo > ${algo}_hpo.sql
 mv ${algo}_hpo.sql ./examples/hpo/rl/${algo}/hpo_study_${sampler}_${sys}/run${experiment_name}/${algo}_hpo.sql
 # remove the database
-python ./safe_control_gym/hyperparameters/database.py --func drop --tag ${algo}_hpo
\ No newline at end of file
+python ./safe_control_gym/hyperparameters/database.py --func drop --tag ${algo}_hpo
diff --git a/examples/pid/pid_experiment.py b/examples/pid/pid_experiment.py
index 7316c21eb..ed5dee36a 100644
--- a/examples/pid/pid_experiment.py
+++ b/examples/pid/pid_experiment.py
@@ -9,7 +9,6 @@
 import pybullet as p
 
 from safe_control_gym.envs.benchmark_env import Environment, Task
-
 from safe_control_gym.experiments.base_experiment import BaseExperiment
 from safe_control_gym.utils.configuration import ConfigFactory
 from safe_control_gym.utils.registration import make
diff --git a/examples/rl/config_overrides/cartpole/cartpole_stab.yaml b/examples/rl/config_overrides/cartpole/cartpole_stab.yaml
index 7ee47f77a..fad908d49 100644
--- a/examples/rl/config_overrides/cartpole/cartpole_stab.yaml
+++ b/examples/rl/config_overrides/cartpole/cartpole_stab.yaml
@@ -50,7 +50,7 @@ task_config:
   # RL Reward
   rew_state_weight: [1, 0.1, 1, 0.1]
   rew_act_weight: 0.1
-  rew_exponential: True
+  rew_exponential: False
 
   # Disturbances
   disturbances:
@@ -62,8 +62,8 @@ task_config:
   constraints:
   - constraint_form: default_constraint
     constrained_variable: state
-    upper_bounds: [10, 10, 0.2, 0.2]
-    lower_bounds: [-10, -10, -0.2, -0.2]
+    upper_bounds: [5.0, 10.0, 0.5, 2.0]
+    lower_bounds: [-5.0, -10.0, -0.5, -2.0]
   - constraint_form: default_constraint
     constrained_variable: input
     upper_bounds: [10]
diff --git a/examples/rl/config_overrides/cartpole/ddpg_cartpole.yaml b/examples/rl/config_overrides/cartpole/ddpg_cartpole.yaml
new file mode 100644
index 000000000..8550c6564
--- /dev/null
+++ b/examples/rl/config_overrides/cartpole/ddpg_cartpole.yaml
@@ -0,0 +1,39 @@
+algo: ddpg
+algo_config:
+  # model args
+  hidden_dim: 128
+  activation: 'relu'
+
+  # loss args
+  gamma: 0.98
+  tau: 0.01
+
+  # noise args
+  random_process:
+    func: OrnsteinUhlenbeckProcess
+    std:
+      func: LinearSchedule
+      args: 0.2
+
+  # optim args
+  train_interval: 100
+  train_batch_size: 64
+  actor_lr: 0.003
+  critic_lr: 0.003
+
+  # runner args
+  max_env_steps: 10000
+  warm_up_steps: 50
+  rollout_batch_size: 4
+  num_workers: 1
+  max_buffer_size: 10000
+  deque_size: 10
+  eval_batch_size: 10
+
+  # misc
+  log_interval: 500
+  save_interval: 0
+  num_checkpoints: 0
+  eval_interval: 500
+  eval_save_best: False
+  tensorboard: False
diff --git a/examples/rl/config_overrides/cartpole/ppo_cartpole.yaml b/examples/rl/config_overrides/cartpole/ppo_cartpole.yaml
index d3d7cdaa0..36a87e58b 100644
--- a/examples/rl/config_overrides/cartpole/ppo_cartpole.yaml
+++ b/examples/rl/config_overrides/cartpole/ppo_cartpole.yaml
@@ -1,7 +1,7 @@
 algo: ppo
 algo_config:
   # model args
-  hidden_dim: 32
+  hidden_dim: 128
   activation: 'leaky_relu'
   norm_obs: False
   norm_reward: False
@@ -20,12 +20,12 @@
   # optim args
   opt_epochs: 5
   mini_batch_size: 128
-  actor_lr: 0.0007948148615930024
-  critic_lr: 0.007497368468753617
+  actor_lr: 0.001
+  critic_lr: 0.001
   max_grad_norm: 0.5
 
   # runner args
-  max_env_steps: 720000
+  max_env_steps: 50000
   num_workers: 1
   rollout_batch_size: 4
   rollout_steps: 150
@@ -33,9 +33,9 @@
   eval_batch_size: 10
 
   # misc
-  log_interval: 6000
+  log_interval: 1000
   save_interval: 0
   num_checkpoints: 0
-  eval_interval: 6000
+  eval_interval: 1000
   eval_save_best: True
   tensorboard: False
diff --git a/examples/rl/config_overrides/cartpole/sac_cartpole.yaml b/examples/rl/config_overrides/cartpole/sac_cartpole.yaml
index d9201b492..5e1222631 100644
--- a/examples/rl/config_overrides/cartpole/sac_cartpole.yaml
+++ b/examples/rl/config_overrides/cartpole/sac_cartpole.yaml
@@ -1,12 +1,12 @@
 algo: sac
 algo_config:
   # model args
-  hidden_dim: 256
+  hidden_dim: 128
   activation: 'relu'
 
   # loss args
   gamma: 0.98
-  tau: 0.12145208815621376
+  tau: 0.01
   init_temperature: 0.2
   use_entropy_tuning: False
   target_entropy: null
@@ -14,16 +14,16 @@
   # optim args
   train_interval: 100
   train_batch_size: 512
-  actor_lr: 0.00045196308120485273
-  critic_lr: 0.022547326782152065
-  entropy_lr: 0.001
+  actor_lr: 0.003
+  critic_lr: 0.003
+  entropy_lr: 0.003
 
   # runner args
-  max_env_steps: 50000
-  warm_up_steps: 100
+  max_env_steps: 10000
+  warm_up_steps: 50
   rollout_batch_size: 4
   num_workers: 1
-  max_buffer_size: 50000
+  max_buffer_size: 10000
   deque_size: 10
   eval_batch_size: 10
diff --git a/examples/rl/config_overrides/cartpole/td3_cartpole.yaml b/examples/rl/config_overrides/cartpole/td3_cartpole.yaml
index e5a13ba98..f242670a3 100644
--- a/examples/rl/config_overrides/cartpole/td3_cartpole.yaml
+++ b/examples/rl/config_overrides/cartpole/td3_cartpole.yaml
@@ -1,7 +1,7 @@
 algo: td3
 algo_config:
   # model args
-  hidden_dim: 256
+  hidden_dim: 128
   activation: 'relu'
 
   # loss args
@@ -15,18 +15,18 @@
   critic_lr: 0.003
 
   # runner args
-  max_env_steps: 50000
-  warm_up_steps: 100
+  max_env_steps: 10000
+  warm_up_steps: 50
   rollout_batch_size: 4
   num_workers: 1
-  max_buffer_size: 50000
+  max_buffer_size: 10000
   deque_size: 10
   eval_batch_size: 10
 
   # misc
-  log_interval: 2000
+  log_interval: 500
   save_interval: 0
   num_checkpoints: 0
-  eval_interval: 2000
+  eval_interval: 500
   eval_save_best: True
   tensorboard: False
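The cartpole override files above are plain YAML with an algo key and an algo_config block. A standalone sketch for inspecting one of them with PyYAML (independent of the repository's ConfigFactory; the path is taken from the diff and assumes the repository root as working directory):

import yaml

path = 'examples/rl/config_overrides/cartpole/sac_cartpole.yaml'  # any of the files above

with open(path) as f:
    override = yaml.safe_load(f)

cfg = override['algo_config']
print(override['algo'], cfg['hidden_dim'], cfg['actor_lr'], cfg['critic_lr'], cfg['max_env_steps'])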
diff --git a/examples/rl/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml b/examples/rl/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
index 2aa366fb5..85821ff16 100644
--- a/examples/rl/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
+++ b/examples/rl/config_overrides/quadrotor_2D/quadrotor_2D_track.yaml
@@ -89,5 +89,5 @@ task_config:
       lower_bounds:
       - 0.06
       - 0.06
-  done_on_out_of_bound: True
+  done_on_out_of_bound: False
   done_on_violation: False
diff --git a/examples/rl/config_overrides/quadrotor_2D/td3_quadrotor_2D.yaml b/examples/rl/config_overrides/quadrotor_2D/td3_quadrotor_2D.yaml
new file mode 100644
index 000000000..970b9d7a9
--- /dev/null
+++ b/examples/rl/config_overrides/quadrotor_2D/td3_quadrotor_2D.yaml
@@ -0,0 +1,29 @@
+algo: sac
+algo_config:
+  # model args
+  hidden_dim: 128
+  activation: "relu"
+  use_entropy_tuning: False
+
+  # optim args
+  train_interval: 100
+  train_batch_size: 256
+  actor_lr: 0.001
+  critic_lr: 0.001
+
+  # runner args
+  max_env_steps: 200000
+  warm_up_steps: 1000
+  rollout_batch_size: 4
+  num_workers: 1
+  max_buffer_size: 1000000
+  deque_size: 10
+  eval_batch_size: 10
+
+  # misc
+  log_interval: 4000
+  save_interval: 0
+  num_checkpoints: 0
+  eval_interval: 4000
+  eval_save_best: True
+  tensorboard: False
diff --git a/examples/rl/data_analysis.ipynb b/examples/rl/data_analysis.ipynb
index 57221c850..a82941cbe 100644
--- a/examples/rl/data_analysis.ipynb
+++ b/examples/rl/data_analysis.ipynb
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "metadata": {},
    "outputs": [
     {
@@ -22,20 +22,22 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 48,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
    "# data_paths = {\"ppo\": os.getcwd()+\"/ppo_data/\",\n",
    "#               \"sac\": os.getcwd()+\"/sac_data/\"}\n",
-   "data_paths = {\"ppo\": os.getcwd()+\"/ppo_data_3/\", \n",
-   "              \"sac\": os.getcwd()+\"/sac_data_3/\"}\n",
-   "seeds = [i for i in range(0,5)]\n"
+   "data_paths = {\"ppo\": os.getcwd()+\"/Results/cartpole_ppo_data/\", \n",
+   "              \"sac\": os.getcwd()+\"/Results/cartpole_sac_data/\", \n",
+   "              \"td3\": os.getcwd()+\"/Results/cartpole_td3_data/\", \n",
+   "              \"ddpg\": os.getcwd()+\"/Results/cartpole_ddpg_data/\"}\n",
+   "seeds = [i for i in range(0,10)]\n"
    ]
   },
[The remainder of the data_analysis.ipynb diff is re-execution output and is condensed here: the later code cells are renumbered; the stdout of the loading loop now lists ppo, sac, td3 and ddpg; the previously commented-out call
    xk, x, zk, z = load_from_log_file(data_paths[method] + str(seed) + "/logs/stat_eval/ep_return_std.log")
is re-enabled, so each entry is now stored as perf_data[method].update({seed: {"x": x, "y": y, "z": z, "c": c}}) rather than without "z"; and the displayed perf_data repr is regenerated. The old display covered ppo and sac for seeds 0-4, evaluated every 6000 steps up to 720000 (ppo) and every 500 steps up to 50000 (sac). The regenerated display covers seeds 0-9, evaluated every 3000 steps up to 48000 for ppo and every 500 steps up to 10000 for sac and td3; each entry holds 'x' (environment steps), 'y' (episode return), 'z' (episode-return standard deviation) and 'c' (constraint violations). The raw array dump runs to several hundred lines and is cut off part-way through the td3 block in the source.]
])},\n", + " 6: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-154.34850522, -120.10633844, -53.41207805, -65.04308116,\n", + " -36.90420265, -39.5696131 , -48.95626 , -22.0555279 ,\n", + " -9.89730637, -7.37245373, -7.37403052, -6.75279697,\n", + " -7.24332818, -6.78674071, -7.96264292, -7.94443069,\n", + " -7.69252758, -8.67176996, -9.87883256, -12.22547147]),\n", + " 'z': array([21.3295575 , 49.62113909, 38.43709574, 39.13836323, 21.8348643 ,\n", + " 30.03320835, 32.25098733, 18.59139114, 6.07970331, 1.16559597,\n", + " 1.84826624, 1.16065894, 1.75867394, 2.20756202, 0.7946329 ,\n", + " 1.19733069, 0.55129436, 1.11288335, 1.69025951, 3.03667543]),\n", + " 'c': array([29.6, 18.9, 13.5, 16.6, 10.9, 10.1, 11.2, 4.3, 0. , 0. , 0. ,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ])},\n", + " 7: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-102.63481832, -172.71014804, -103.77656218, -24.05083644,\n", + " -7.66221092, -7.90928273, -8.46990309, -17.49743608,\n", + " -6.33918355, -45.11152547, -42.65535513, -7.68147045,\n", + " -6.60143647, -7.61578275, -6.34325229, -7.88846321,\n", + " -9.66269034, -6.74711147, -6.94283885, -7.09796546]),\n", + " 'z': array([19.05515338, 68.19663241, 41.56309264, 19.435171 , 4.26060053,\n", + " 4.27840437, 3.7185322 , 23.37100265, 2.54804485, 43.67874513,\n", + " 40.3249004 , 1.95806576, 1.83900344, 2.0136299 , 1.88739857,\n", + " 2.81153385, 4.09005582, 0.82518325, 0.86247953, 2.42411192]),\n", + " 'c': array([24.7, 20.6, 21.7, 4.2, 0. , 0.1, 0. , 2. , 0. , 8.6, 8.5,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ])},\n", + " 8: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-92.20479425, -19.47851148, -26.84153924, -60.18777517,\n", + " -16.55054841, -8.94515712, -12.34361683, -15.25074933,\n", + " -6.70738605, -17.09117543, -8.09510113, -9.69472508,\n", + " -11.64001963, -8.53629672, -8.25961659, -6.72398336,\n", + " -8.44622293, -10.03643956, -7.58991522, -8.72999117]),\n", + " 'z': array([16.34015432, 16.98137133, 18.47352252, 33.15342032, 10.28662385,\n", + " 4.74198897, 3.33383242, 13.4792587 , 2.5046929 , 14.97965586,\n", + " 2.91004502, 1.70101924, 1.32698971, 0.62890362, 1.36244358,\n", + " 1.54492238, 1.77453121, 3.93491631, 2.31683249, 1.91600621]),\n", + " 'c': array([22.5, 2.7, 5.4, 16.1, 2.3, 0. , 2.1, 3.4, 0. , 0.7, 0. ,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. 
])},\n", + " 9: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-90.7885944 , -87.11952652, -64.70260735, -19.88583916,\n", + " -11.20716592, -73.29198892, -21.61302407, -10.02653052,\n", + " -9.82398538, -6.7724848 , -9.30154277, -11.07868205,\n", + " -9.58243823, -7.53893544, -8.12099766, -10.73081034,\n", + " -13.73528459, -11.1555124 , -15.45595653, -29.84646264]),\n", + " 'z': array([ 6.77511907, 11.74216639, 37.61303157, 18.157194 , 7.29019766,\n", + " 18.47515421, 37.19238422, 3.34941509, 1.62778328, 1.69626118,\n", + " 2.1458154 , 3.67711941, 3.06687186, 2.09753463, 2.73303713,\n", + " 4.0618146 , 7.10106535, 2.65942251, 7.52857632, 37.59229216]),\n", + " 'c': array([25.7, 23.7, 17.4, 6.8, 2.6, 17.6, 4.3, 2.1, 1.1, 0. , 0. ,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 1.8])}},\n", + " 'ddpg': {0: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -95.95716586, -77.536507 , -90.2450936 , -73.37329898,\n", + " -82.54700179, -80.78333082, -51.6642578 , -64.8446082 ,\n", + " -76.75541837, -14.35556661, -85.20919299, -13.37806732,\n", + " -72.13630312, -206.19050725, -450.77263209, -509.61860935,\n", + " -209.64849755, -166.91184915, -126.29368466, -18.64014551]),\n", + " 'z': array([ 37.62329252, 20.1873807 , 26.12227757, 39.15299444,\n", + " 36.23030113, 40.00687884, 33.29503219, 35.15646545,\n", + " 40.20367932, 9.95610163, 21.40140716, 5.26507638,\n", + " 95.21710862, 110.63661417, 61.29253974, 51.43481752,\n", + " 28.04528254, 41.45146256, 30.83840654, 2.71839663]),\n", + " 'c': array([22.4, 23.5, 19.7, 18.2, 12.4, 16.2, 11.8, 16.5, 21.1, 3.6, 10.6,\n", + " 0. , 5.7, 15.3, 19.6, 31.3, 25.8, 25. , 15.3, 0. 
])},\n", " 1: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", - " 8500., 9000., 9500., 10000., 10500., 11000., 11500., 12000.,\n", - " 12500., 13000., 13500., 14000., 14500., 15000., 15500., 16000.,\n", - " 16500., 17000., 17500., 18000., 18500., 19000., 19500., 20000.,\n", - " 20500., 21000., 21500., 22000., 22500., 23000., 23500., 24000.,\n", - " 24500., 25000., 25500., 26000., 26500., 27000., 27500., 28000.,\n", - " 28500., 29000., 29500., 30000., 30500., 31000., 31500., 32000.,\n", - " 32500., 33000., 33500., 34000., 34500., 35000., 35500., 36000.,\n", - " 36500., 37000., 37500., 38000., 38500., 39000., 39500., 40000.,\n", - " 40500., 41000., 41500., 42000., 42500., 43000., 43500., 44000.,\n", - " 44500., 45000., 45500., 46000., 46500., 47000., 47500., 48000.,\n", - " 48500., 49000., 49500., 50000.]),\n", - " 'y': array([-1.39978279e+04, -1.16337191e+04, -2.68644583e+02, -4.81148099e+02,\n", - " -5.37553632e+03, -9.65428582e+02, -4.47807589e+02, -6.01029205e+02,\n", - " -6.23635546e+02, -4.86614758e+02, -5.86516396e+02, -5.26575099e+02,\n", - " -5.99829531e+02, -9.83383993e+02, -1.22709868e+03, -6.63552712e+02,\n", - " -6.17231316e+02, -5.18178522e+02, -4.96102642e+02, -5.25783329e+02,\n", - " -5.66184133e+02, -5.39487841e+02, -6.24070572e+02, -5.12905280e+02,\n", - " -4.71160085e+02, -4.83385453e+02, -4.61873205e+02, -4.94672987e+02,\n", - " -2.69529493e+02, -6.25345326e+02, -4.65210493e+02, -5.22635239e+02,\n", - " -4.13307282e+02, -5.34696272e+02, -7.08614824e+02, -6.07681299e+02,\n", - " -4.35267591e+02, -4.43722224e+02, -4.71928035e+02, -4.35631926e+02,\n", - " -4.51979864e+02, -4.85855008e+02, -5.26781025e+02, -5.07743968e+02,\n", - " -5.55303974e+02, -4.26916411e+02, -4.47085951e+02, -4.48743111e+02,\n", - " -4.55254629e+02, -4.60964782e+02, -4.23541517e+02, -4.27516038e+02,\n", - " -4.07931778e+02, -3.85629784e+02, -4.64468752e+02, -3.22505433e+02,\n", - " -4.33003342e+02, -3.47729789e+02, -4.16001104e+02, -4.92683452e+02,\n", - " -5.16864610e+02, -4.87113742e+02, -4.09838037e+02, -4.37475618e+02,\n", - " -4.40755991e+02, -4.45760725e+02, -3.98495226e+02, -3.37347642e+02,\n", - " -4.04627345e+02, -4.25855615e+02, -3.24957222e+02, -3.98866528e+02,\n", - " -5.13136206e+02, -5.34672920e+02, -8.47490255e+02, -6.65226782e+02,\n", - " -5.74850167e+02, -4.36338131e+02, -2.97705722e+02, -2.95073105e+02,\n", - " -2.30767740e+02, -5.06974657e+02, -2.16164066e+02, -5.27535377e+02,\n", - " -4.94164650e+02, -2.63081044e+02, -1.29482152e+01, -5.00627982e+02,\n", - " -3.61175779e+02, -2.99146934e+02, -3.66197322e+01, -3.80255393e+01,\n", - " -1.76372415e+01, -3.73752621e+02, -4.16135963e+02, -2.68355811e+02,\n", - " -1.34110857e+01, -1.18275825e+02, -1.78695343e+02, -4.77207037e+02]),\n", - " 'c': array([68.2, 75. , 59. , 70.8, 75. , 75. , 72.5, 75. , 75. , 75. , 75. ,\n", - " 75. , 75. , 75. , 75. , 75. , 75. , 75. , 75. , 74.5, 75. , 74.8,\n", - " 75. , 74.9, 74. , 73.7, 60.7, 54.3, 37. , 74.7, 74. , 50.5, 69.8,\n", - " 74.4, 75. , 71.5, 69. , 74.8, 72.6, 74.9, 72.3, 71.6, 75. , 75. ,\n", - " 73.5, 75. , 68.5, 68.5, 68.4, 70.6, 75. , 75. , 73.6, 58.7, 74.2,\n", - " 44.1, 74.4, 59.8, 71.7, 75. , 75. , 63.6, 60.4, 71.6, 66.1, 64.5,\n", - " 60.9, 56.2, 74.2, 67.1, 40.7, 56.9, 52. , 54.4, 56.9, 58.3, 69.5,\n", - " 65.2, 49.6, 58.3, 37.6, 65.6, 58. , 75. , 74. , 47.8, 0. , 58.7,\n", - " 45. , 39.2, 9.2, 13.4, 7.3, 54.6, 55.5, 45.7, 1. 
, 17.1, 30.2,\n", - " 67.1])},\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -84.75858338, -87.40659822, -71.40335766, -164.30481958,\n", + " -99.39236823, -157.97967084, -97.35770974, -106.74897974,\n", + " -31.69817764, -18.91556815, -6.85103537, -5.91246398,\n", + " -10.91007875, -9.23343595, -8.19291483, -9.30628586,\n", + " -5.96941117, -30.53573469, -22.3662505 , -87.49797925]),\n", + " 'z': array([15.37321957, 4.09066969, 48.9356664 , 22.30673999, 29.46223032,\n", + " 12.82361635, 11.0853089 , 1.34894136, 19.03377157, 16.46351775,\n", + " 1.98202837, 1.4020216 , 2.33703549, 2.99290714, 1.00996253,\n", + " 2.05439719, 1.58743207, 23.6289253 , 32.87234004, 53.79426573]),\n", + " 'c': array([23. , 27.5, 17.7, 21.4, 20.6, 31.7, 24.7, 27.9, 7.9, 3.3, 0. ,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 6. , 3.6, 19.7])},\n", " 2: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", - " 8500., 9000., 9500., 10000., 10500., 11000., 11500., 12000.,\n", - " 12500., 13000., 13500., 14000., 14500., 15000., 15500., 16000.,\n", - " 16500., 17000., 17500., 18000., 18500., 19000., 19500., 20000.,\n", - " 20500., 21000., 21500., 22000., 22500., 23000., 23500., 24000.,\n", - " 24500., 25000., 25500., 26000., 26500., 27000., 27500., 28000.,\n", - " 28500., 29000., 29500., 30000., 30500., 31000., 31500., 32000.,\n", - " 32500., 33000., 33500., 34000., 34500., 35000., 35500., 36000.,\n", - " 36500., 37000., 37500., 38000., 38500., 39000., 39500., 40000.,\n", - " 40500., 41000., 41500., 42000., 42500., 43000., 43500., 44000.,\n", - " 44500., 45000., 45500., 46000., 46500., 47000., 47500., 48000.,\n", - " 48500., 49000., 49500., 50000.]),\n", - " 'y': array([ -419.88586153, -475.93118421, -506.05602028, -421.21010321,\n", - " -442.04410281, -419.8867473 , -733.38972642, -567.68615098,\n", - " -561.41979275, -551.65090128, -610.19774788, -11580.04250759,\n", - " -588.76837472, -577.91330549, -438.90882211, -351.55491758,\n", - " -511.2351893 , -441.68311177, -659.24799389, -415.74915622,\n", - " -380.77272297, -401.21784902, -407.82671221, -7521.76404587,\n", - " -6255.92575407, -462.03041279, -454.34549041, -624.00920391,\n", - " -589.08490456, -484.01619275, -471.51571345, -471.88553558,\n", - " -453.5311147 , -469.57877974, -435.59546103, -516.29733513,\n", - " -619.34171443, -591.87216081, -589.21659591, -419.7929405 ,\n", - " -456.44025254, -428.02778266, -593.25319192, -542.0477097 ,\n", - " -541.5520789 , -558.52901035, -553.19642527, -553.79373361,\n", - " -558.56120686, -516.49383055, -512.66534195, -697.33357848,\n", - " -876.82429058, -1098.07554216, -477.61280733, -451.74065975,\n", - " -402.50068199, -517.77069188, -598.29257578, -582.8069224 ,\n", - " -577.87749496, -563.14204888, -566.41514884, -541.71606806,\n", - " -8335.82760085, -10175.94399261, -618.2934174 , -587.77629693,\n", - " -608.33836633, -599.95524999, -564.8702342 , -574.4031588 ,\n", - " -480.29490336, -897.76520987, -648.83600589, -681.24317771,\n", - " -662.76882584, -724.81546013, -616.04125164, -782.29932119,\n", - " -520.26047829, -648.33646013, -436.91912141, -422.82299453,\n", - " -430.22012838, -524.30239139, -2967.19183164, -448.6750295 ,\n", - " -432.69993831, -414.5267237 , -529.19645949, -504.54926939,\n", - " -3856.07001263, -484.63436687, -444.50897046, -416.34501806,\n", - " -426.66330806, -436.18707116, -426.51229272, -418.83342933]),\n", - " 'c': array([65.1, 75. , 70.5, 68.7, 72.8, 70.3, 75. 
, 73.7, 72.5, 72.1, 71.7,\n", - " 75. , 75. , 75. , 73.8, 54.3, 75. , 75. , 72.7, 74.9, 53. , 53.7,\n", - " 68.3, 75. , 75. , 72.3, 67.9, 74.7, 74.8, 75. , 75. , 75. , 71.3,\n", - " 67.6, 68.4, 70.7, 72.5, 74.4, 72.8, 68.6, 73.6, 73.5, 75. , 75. ,\n", - " 75. , 75. , 75. , 74.2, 75. , 75. , 75. , 75. , 75. , 75. , 62.9,\n", - " 75. , 63.7, 75. , 75. , 75. , 75. , 75. , 75. , 75. , 75. , 75. ,\n", - " 75. , 74.8, 75. , 75. , 74.5, 74.8, 71.9, 75. , 75. , 75. , 75. ,\n", - " 74.7, 74.9, 75. , 74.5, 74.9, 75. , 70.3, 67.1, 75. , 68.8, 74. ,\n", - " 75. , 74.8, 69.1, 75. , 75. , 75. , 68.8, 72.1, 74.3, 72.7, 75. ,\n", - " 71. ])},\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-244.86325227, -86.3951179 , -85.37054626, -76.94824509,\n", + " -81.44168073, -77.58697262, -33.36149021, -105.36088698,\n", + " -96.24617048, -96.71227961, -93.16172698, -105.29479754,\n", + " -126.53547428, -128.7206711 , -107.23703396, -95.2677292 ,\n", + " -35.4000771 , -9.13614517, -104.2566314 , -11.59143422]),\n", + " 'z': array([32.92787665, 6.05179659, 10.69826999, 33.28526877, 15.57732296,\n", + " 22.10218203, 25.21959222, 41.97317817, 30.7794646 , 28.29451386,\n", + " 13.33886871, 1.68283583, 1.28672757, 0.5135691 , 2.01013613,\n", + " 38.40509519, 28.99504031, 3.47007711, 24.24935571, 7.70797002]),\n", + " 'c': array([31.8, 26.5, 23. , 19.6, 27.3, 23. , 7.2, 20.6, 24.1, 26.7, 27.7,\n", + " 33.3, 29.9, 29.9, 31.7, 19.5, 7.5, 0.1, 24. , 2.1])},\n", " 3: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", - " 8500., 9000., 9500., 10000., 10500., 11000., 11500., 12000.,\n", - " 12500., 13000., 13500., 14000., 14500., 15000., 15500., 16000.,\n", - " 16500., 17000., 17500., 18000., 18500., 19000., 19500., 20000.,\n", - " 20500., 21000., 21500., 22000., 22500., 23000., 23500., 24000.,\n", - " 24500., 25000., 25500., 26000., 26500., 27000., 27500., 28000.,\n", - " 28500., 29000., 29500., 30000., 30500., 31000., 31500., 32000.,\n", - " 32500., 33000., 33500., 34000., 34500., 35000., 35500., 36000.,\n", - " 36500., 37000., 37500., 38000., 38500., 39000., 39500., 40000.,\n", - " 40500., 41000., 41500., 42000., 42500., 43000., 43500., 44000.,\n", - " 44500., 45000., 45500., 46000., 46500., 47000., 47500., 48000.,\n", - " 48500., 49000., 49500., 50000.]),\n", - " 'y': array([-402.87768605, -416.03925652, -397.67433939, -15.51086244,\n", - " -338.74450269, -13.78740854, -67.46660934, -21.78790687,\n", - " -29.73239106, -32.3916248 , -54.77101753, -92.82352469,\n", - " -107.97453387, -9.60859989, -14.76553584, -22.88353444,\n", - " -12.82946344, -9.34521305, -132.37672991, -21.05507744,\n", - " -15.84140431, -18.83522117, -19.40723473, -19.54364041,\n", - " -7.09304335, -37.89645659, -4.56799086, -13.06765202,\n", - " -9.07659618, -17.14236606, -19.15115235, -9.32101615,\n", - " -5.29976631, -36.1093172 , -18.30178842, -6.8827166 ,\n", - " -8.89846165, -7.57448302, -13.61864501, -18.04705961,\n", - " -19.02294347, -23.07490903, -18.25020172, -16.16611976,\n", - " -28.15521032, -34.55727866, -12.80221679, -28.29491116,\n", - " -43.0927741 , -28.53239886, -11.11941774, -22.20636851,\n", - " -8.05345016, -13.29283074, -25.64067706, -23.33745374,\n", - " -9.50822568, -9.25335078, -14.12768848, -6.93936013,\n", - " -25.64781529, -25.26731862, -26.42465292, -51.34845922,\n", - " -55.17523907, -41.99262818, -43.06463348, -63.5350736 ,\n", - " -65.10800316, -39.23369191, -31.83457628, -38.7218268 ,\n", - " -25.70734803, -24.22637536, 
-179.86802223, -5.47959912,\n", - " -7.17286481, -8.74904283, -15.47622047, -13.02475882,\n", - " -20.18156647, -15.31774842, -17.21061661, -17.18823759,\n", - " -27.56809509, -25.49753247, -16.33563442, -26.52646995,\n", - " -38.69693586, -28.86193921, -24.89950026, -45.43547793,\n", - " -82.49761768, -52.29974492, -4.69658362, -22.28091931,\n", - " -30.59838984, -48.22223506, -35.83578102, -15.7452339 ]),\n", - " 'c': array([69.8, 70.4, 50.4, 0.2, 34.7, 0. , 0.8, 0. , 0. , 0. , 1. ,\n", - " 0.9, 0.1, 0. , 0. , 0. , 0. , 0. , 1.4, 0. , 0.2, 0. ,\n", - " 0. , 0.2, 0. , 1.4, 0. , 0. , 0. , 0. , 0. , 0. , 0. ,\n", - " 0.7, 0. , 0. , 0. , 0. , 0.4, 0.9, 0.3, 0.5, 0. , 0.2,\n", - " 0. , 0.3, 0. , 0.7, 0.7, 0.5, 0. , 0. , 0. , 0. , 0.4,\n", - " 0.3, 0.1, 0. , 0. , 0. , 0.5, 0. , 0. , 0. , 0.7, 0. ,\n", - " 0.5, 0.1, 1.5, 0.4, 1. , 0.2, 0.2, 0. , 3.9, 0. , 0. ,\n", - " 0. , 0. , 0.1, 0. , 0. , 0. , 0.5, 0.3, 0.1, 0. , 0.1,\n", - " 0.1, 0.1, 0.1, 0.1, 0. , 0. , 0. , 0.4, 0.4, 0.3, 0.2,\n", - " 0. ])},\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -88.73759221, -126.00000311, -90.99067143, -80.46410846,\n", + " -12.83645707, -16.03716323, -42.81936977, -13.51917284,\n", + " -24.75945539, -35.83917692, -14.02774955, -9.29914404,\n", + " -10.60575605, -11.95517279, -7.10112706, -7.92139116,\n", + " -19.43059879, -75.15357008, -13.38348433, -7.0985137 ]),\n", + " 'z': array([13.15407607, 52.46704738, 2.18520214, 11.22204933, 12.81667528,\n", + " 7.51305438, 27.89587958, 13.54602554, 23.6786912 , 37.86153515,\n", + " 14.33454502, 4.06873548, 4.58162264, 8.51931056, 3.7506986 ,\n", + " 1.18807648, 28.95627727, 30.58058651, 24.14060408, 1.41197402]),\n", + " 'c': array([26.2, 26.3, 29.5, 22.7, 1.8, 3.5, 11.7, 1.7, 7.5, 7.8, 2.8,\n", + " 0.8, 1.3, 3.6, 0.9, 0. , 4.4, 21.7, 2.4, 0. 
])},\n", " 4: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", - " 8500., 9000., 9500., 10000., 10500., 11000., 11500., 12000.,\n", - " 12500., 13000., 13500., 14000., 14500., 15000., 15500., 16000.,\n", - " 16500., 17000., 17500., 18000., 18500., 19000., 19500., 20000.,\n", - " 20500., 21000., 21500., 22000., 22500., 23000., 23500., 24000.,\n", - " 24500., 25000., 25500., 26000., 26500., 27000., 27500., 28000.,\n", - " 28500., 29000., 29500., 30000., 30500., 31000., 31500., 32000.,\n", - " 32500., 33000., 33500., 34000., 34500., 35000., 35500., 36000.,\n", - " 36500., 37000., 37500., 38000., 38500., 39000., 39500., 40000.,\n", - " 40500., 41000., 41500., 42000., 42500., 43000., 43500., 44000.,\n", - " 44500., 45000., 45500., 46000., 46500., 47000., 47500., 48000.,\n", - " 48500., 49000., 49500., 50000.]),\n", - " 'y': array([-4.57500509e+02, -4.77922800e+02, -7.96641101e+01, -2.84932400e+02,\n", - " -1.09807566e+02, -6.53430145e+00, -5.41508352e+00, -4.98000493e+00,\n", - " -6.74103101e+00, -7.40928002e+00, -7.43599825e+00, -1.24499965e+01,\n", - " -1.17793271e+01, -5.01812639e+00, -5.90506680e+00, -7.71943377e+00,\n", - " -4.07006721e+00, -3.69255045e+00, -1.08510502e+01, -4.11559234e+00,\n", - " -3.85794991e+00, -1.50870028e+01, -5.76964481e+00, -4.79259043e+00,\n", - " -7.02187923e+00, -4.91691786e+00, -4.03516720e+00, -3.73672692e+00,\n", - " -7.17760023e+00, -3.51616247e+00, -3.45872154e+00, -4.91154067e+00,\n", - " -4.40776768e+00, -3.77920118e+00, -5.27338228e+00, -3.82112310e+00,\n", - " -3.46206971e+00, -4.37610729e+00, -4.76651356e+00, -5.09622795e+00,\n", - " -3.85846420e+00, -4.80509717e+00, -1.39312864e+02, -6.80146920e+00,\n", - " -6.35461635e+00, -4.93699939e+00, -3.24652009e+00, -1.30364774e+01,\n", - " -4.53752632e+00, -8.93677593e+00, -3.93623019e+00, -5.97454732e+00,\n", - " -1.11628673e+01, -8.55298337e+00, -3.87388258e+00, -5.23504896e+02,\n", - " -6.00014427e+04, -1.06109734e+03, -2.33565026e+02, -5.45415616e+02,\n", - " -7.04119988e+00, -3.64701379e+00, -4.14990241e+00, -1.04589259e+01,\n", - " -7.42013209e+00, -2.48328808e+00, -4.96037687e+00, -3.65201618e+00,\n", - " -4.37270843e+00, -5.32763222e+00, -8.32588126e+00, -1.14359864e+01,\n", - " -6.07596597e+00, -1.32472316e+01, -5.13856728e+00, -3.80266858e+00,\n", - " -5.04807738e+00, -6.82508155e+00, -3.51097630e+00, -3.21814344e+00,\n", - " -3.20756772e+00, -7.15586469e+00, -3.38546545e+00, -3.73316371e+00,\n", - " -4.06561456e+00, -6.12323427e+00, -4.45902448e+00, -2.23620498e+01,\n", - " -1.11402504e+01, -8.76936709e+00, -3.38528020e+00, -3.54128248e+00,\n", - " -1.50799442e+01, -5.18014918e+00, -5.46999037e+00, -5.55881441e+00,\n", - " -7.65469989e+00, -4.56037432e+00, -2.37177098e+01, -6.43359233e+00]),\n", - " 'c': array([65.6, 75. , 34.3, 49.2, 28.6, 2. , 3. , 0. , 0. , 0. , 0. ,\n", - " 0. , 5.3, 0. , 0. , 0. , 0. , 0. , 0. , 0.1, 0. , 0. ,\n", - " 0. , 0. , 0. , 0. , 0.2, 0. , 0. , 0. , 1.4, 0. , 0. ,\n", - " 0.7, 0. , 0.3, 0. , 0. , 0. , 0. , 0. , 0. , 19.1, 0. ,\n", - " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ,\n", - " 67.8, 75. , 75. , 34.7, 52.5, 3.9, 0. , 0. , 0. , 0. , 0. ,\n", - " 0. , 0. , 0.2, 0. , 0. , 0. , 0. , 0. , 0. , 0.3, 0.2,\n", - " 0. , 0.2, 0.1, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 4.9,\n", - " 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.6, 0. , 0. 
, 0.7,\n", - " 0.1])}}}" + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -57.85865149, -74.11694082, -70.19026064, -84.18831636,\n", + " -83.53374019, -71.85111259, -57.51445245, -84.49066923,\n", + " -72.5572182 , -79.28883464, -45.83532699, -129.85670736,\n", + " -313.31889514, -19.69863616, -9.36634841, -25.68620527,\n", + " -11.51693558, -9.62308996, -30.12383728, -12.26404949]),\n", + " 'z': array([30.18438732, 29.870457 , 29.04197513, 3.22187512, 25.36574105,\n", + " 37.05351538, 36.13086451, 36.08591068, 49.98065166, 54.27231097,\n", + " 28.48041137, 23.18674818, 41.32047561, 13.10132903, 3.27045503,\n", + " 3.49000542, 7.35901433, 5.1008803 , 14.51229556, 10.6276242 ]),\n", + " 'c': array([16.2, 22.1, 22.9, 28.2, 24. , 16.2, 14.4, 20.1, 16.9, 16.1, 12.8,\n", + " 22.9, 23. , 1.5, 0. , 0. , 0. , 0. , 0. , 0. ])},\n", + " 5: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -95.77587374, -85.33406005, -114.33498087, -112.34155807,\n", + " -101.60911145, -133.99463064, -8.74047116, -115.08657042,\n", + " -17.69012209, -103.04353705, -122.925984 , -78.92432405,\n", + " -101.91064899, -99.68935924, -30.05280358, -94.6588618 ,\n", + " -31.63711694, -14.87276245, -14.76882662, -21.25790984]),\n", + " 'z': array([15.75878618, 14.32290751, 30.06350849, 35.90840224, 6.94466408,\n", + " 33.31356661, 3.73396229, 39.76046643, 6.17335928, 45.46260125,\n", + " 14.57370589, 46.95368372, 19.03904578, 8.04376882, 22.22837657,\n", + " 3.83988157, 33.5729415 , 12.50391111, 0.96691551, 4.3192503 ]),\n", + " 'c': array([24.6, 23.4, 23.4, 25. , 29.4, 27.7, 0. , 21.5, 0. , 14.3, 17.3,\n", + " 17.3, 25.6, 26.8, 4.8, 26.8, 6.8, 3.3, 0. , 1.9])},\n", + " 6: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-121.51290919, -92.26852823, -21.4798191 , -78.72161812,\n", + " -92.70530608, -57.91764719, -98.39567541, -67.57846499,\n", + " -53.85569133, -54.57893978, -18.58912415, -31.86498026,\n", + " -25.25531387, -54.93528158, -16.70189186, -18.7636848 ,\n", + " -11.63996589, -15.92818131, -14.74570672, -21.61254021]),\n", + " 'z': array([25.29275722, 20.45706251, 11.31938166, 27.18819152, 6.56093936,\n", + " 35.39044708, 18.98274941, 25.62347078, 27.17950953, 33.98969634,\n", + " 8.61352965, 18.7021114 , 16.42592361, 22.47267129, 7.07717974,\n", + " 4.18982177, 2.08890093, 7.82016317, 8.789092 , 7.12586545]),\n", + " 'c': array([26.4, 21. , 5.6, 20.1, 25.6, 13.9, 23.1, 17.1, 13.1, 11.5, 1.9,\n", + " 0. , 0. , 0. , 0. , 0. , 0. , 0. 
, 0.6, 2.2])},\n", + " 7: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-108.38387466, -98.20150427, -34.85556526, -42.10022212,\n", + " -65.56799993, -11.34277597, -28.18381525, -10.79434242,\n", + " -7.90888795, -12.71270688, -21.11374718, -103.85949379,\n", + " -51.93365982, -152.8030517 , -175.08409233, -145.15263834,\n", + " -115.76310848, -102.18559016, -27.85745691, -13.47415484]),\n", + " 'z': array([21.44585732, 6.34545397, 14.20572753, 36.76684962, 25.82404116,\n", + " 6.43910912, 25.48781057, 4.73920333, 2.30441533, 2.37860444,\n", + " 20.16364963, 76.89157171, 56.37863798, 9.83543846, 13.79390946,\n", + " 10.22818282, 6.37749184, 3.97117169, 5.19507364, 2.85361773]),\n", + " 'c': array([29.5, 26.2, 1.2, 7.1, 16. , 0.7, 5.1, 2. , 0. , 0. , 4.7,\n", + " 14.4, 11. , 38. , 40.8, 40.2, 34.4, 35.4, 0. , 0. ])},\n", + " 8: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([-166.43640934, -174.6512315 , -81.928213 , -261.60196128,\n", + " -135.05830111, -152.05504637, -295.16612293, -238.92046484,\n", + " -136.93420936, -147.83613596, -242.14531438, -114.41628891,\n", + " -28.59287696, -37.56466098, -19.71274223, -69.81461789,\n", + " -33.5928915 , -178.88189746, -114.03204541, -49.9306826 ]),\n", + " 'z': array([98.32053454, 59.11481943, 26.0411492 , 74.75096028, 0.3754464 ,\n", + " 3.6909833 , 18.00529347, 29.42848687, 16.84944035, 22.69305223,\n", + " 34.00426581, 10.29195785, 24.08533589, 26.63153401, 9.93230311,\n", + " 23.82371447, 18.06219468, 30.78322225, 43.88140579, 45.76903533]),\n", + " 'c': array([24.3, 24.2, 21.1, 24.8, 39.7, 38.2, 37.7, 33.9, 27.2, 24.4, 30.2,\n", + " 29.1, 7. , 9.3, 3.5, 15.6, 6.2, 28.9, 20.4, 8.5])},\n", + " 9: {'x': array([ 500., 1000., 1500., 2000., 2500., 3000., 3500., 4000.,\n", + " 4500., 5000., 5500., 6000., 6500., 7000., 7500., 8000.,\n", + " 8500., 9000., 9500., 10000.]),\n", + " 'y': array([ -98.59481682, -76.35499153, -82.34566714, -97.722363 ,\n", + " -55.73676181, -62.23786681, -107.983981 , -123.65415846,\n", + " -60.05662397, -32.24639699, -45.29479073, -17.22421801,\n", + " -10.93949224, -51.29447895, -7.56834087, -6.51862324,\n", + " -5.432557 , -4.93144591, -8.47881117, -7.1310802 ]),\n", + " 'z': array([ 9.15574628, 18.6260246 , 20.07091993, 40.44450302, 26.40887359,\n", + " 34.10840208, 8.73629036, 11.55301326, 39.18273304, 11.03148291,\n", + " 34.53789721, 4.92900077, 5.66695823, 36.4908701 , 2.27143873,\n", + " 1.85179411, 1.44159655, 1.45038918, 2.62394397, 2.13812183]),\n", + " 'c': array([27.6, 24.5, 21.3, 21.5, 14.2, 15.8, 29.6, 26.1, 16.7, 6.5, 11.1,\n", + " 0. , 1.8, 11.2, 0. , 0. , 0. , 0. , 0. , 0. 
[Remainder of the notebook output diff elided: the results literal closes, cell execution counts are renumbered (51 -> 5 and 55 -> 8), the cell's printed progress listing now enumerates seeds 0-9 for every algorithm ('sac' gains seeds 5-9 and the 'td3' and 'ddpg' blocks are newly added), the 'Task: Cartpole' title output is unchanged, and the old embedded base64 PNG of that figure is removed, presumably replaced by a regenerated image further down.]
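For orientation only, here is a minimal, hypothetical sketch (not part of the patch) of how results stored in the {algorithm: {seed: {'x', 'y', 'z', 'c'}}} layout summarized above could be aggregated across seeds and plotted. The key meanings assumed here ('x' = training steps, 'y' = evaluation return, 'z' = its spread, 'c' = a constraint/violation count) and every name in the snippet are assumptions, not taken from the repository.

# Hypothetical sketch: aggregate the per-seed 'y' curves of each algorithm
# and plot mean +/- std against the shared 'x' step grid.
import numpy as np
import matplotlib.pyplot as plt

# Tiny synthetic stand-in for the cached results dict shown in the notebook diff.
rng = np.random.default_rng(0)
steps = np.arange(500.0, 10001.0, 500.0)          # 20 points, every 500 steps
results = {
    algo: {seed: {'x': steps,
                  'y': -80.0 * np.exp(-steps / 3000.0) - rng.uniform(5, 10, steps.size),
                  'z': rng.uniform(1, 5, steps.size),
                  'c': rng.uniform(0, 25, steps.size)}
           for seed in range(10)}
    for algo in ('sac', 'td3', 'ddpg')
}

fig, ax = plt.subplots()
for algo, runs in results.items():
    x = next(iter(runs.values()))['x']            # step grid shared by all seeds
    y = np.stack([run['y'] for run in runs.values()])   # shape (n_seeds, n_points)
    mean, std = y.mean(axis=0), y.std(axis=0)
    ax.plot(x, mean, label=algo)
    ax.fill_between(x, mean - std, mean + std, alpha=0.2)
ax.set_xlabel('training steps')
ax.set_ylabel("stored 'y' value (assumed return)")
ax.set_title('Task: Cartpole')
ax.legend()
plt.show()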
5XKsXbu22PvkZmVlVeTQ8vXr16v1a1q3bh0yMzPRrVs3AMrPbm5ujpUrV6rF8tNPP0Emk6mNYsvrgw8+QFZWFubPn5/vWGZmps5nnyaqjFjTQ0QF6tq1K8zNzdGrVy+MHDkSycnJ+PHHH1GlShVERESoym3atAlr165Fnz59UKtWLSQlJeHHH3+Era2tqoYjr969e2PDhg0YNGgQbG1t8cMPP6iOaTpk3cHBAbt370b37t3RtGlTtRmZQ0JC8Ntvv8Hf3x8A0LZtWzg4OGDw4MEYN24cRCIRfvnll3zJWnGaN2+OdevW4auvvkLt2rVRpUoVdOrUSXVcLpejc+fO+OCDD3D//n2sXbsW7du3x9tvvw1A2QQ1ffp0zJ07F2+99RbefvttVbmWLVsWOTFkQEAARo4ciUWLFuH69evo2rUrzMzM8PDhQ+zYsQPfffedxsP3iYyWXseOEZHBKGjI+r59+wRfX1/BwsJC8Pb2FpYsWSL8/PPPAgAhNDRUEARBCAkJEQYMGCB4enoKEolEqFKlitCzZ0/hypUrquvkHrKe29q1awUAwuTJk1X7NB2ynu2///4TJkyYINStW1ewsLAQLC0thebNmwsLFiwQZDKZqtzZs2eFNm3aCFKpVHB3dxemTJki/P333wIA4fjx46pyAQEBQqNGjQq8V2RkpNCjRw/BxsZGAKAavp49ZP3kyZPCiBEjBAcHB8Ha2loIDg4WYmNj811n9erVQv369QUzMzPB1dVVGDVqlBAfH69WJu+Q9Wzr168XmjdvLkilUsHGxkbw8fERpkyZIvz3338aPzMiYyUSBC3/1CEiIjXZEwZevnwZLVq00Hc4RFQI9ukhIiIio8Ckh4iIiIwCkx4iIiIyCnpNek6dOoVevXrB3d0dIpEIe/bsUTsuCAK+/PJLVK1aFVKpFIGBgXj48KFambi4OAQHB8PW1hb29vb4+OOPkZycXI6fgoiM3ZAhQyAIAvvzEBk4vSY9KSkpaNKkCdasWVPg8aVLl2LlypX4/vvvcfHiRVhZWSEoKAhpaWmqMsHBwbh9+zaOHDmC/fv349SpUxgxYkR5fQQiIiKqIAxm9JZIJMLu3bvRu3dvAMpaHnd3d0yaNAmTJ08GoJwYzdXVFRs3bkT//v1x9+5dNGzYUG3ExKFDh9C9e3c8f/4c7u7u+vo4REREZGAMdnLC0NBQREZGqk3Lbmdnh9atW+P8+fPo378/zp8/D3t7e7Uq5cDAQJiYmODixYvo06dPgddOT09XmylWoVAgLi4OTk5OxU4zT0RERIZBEAQkJSXB3d0dJibFN14ZbNITGRkJAHB1dVXb7+rqqjoWGRmZb8VhU1NTODo6qsoUZNGiRZg7d66OIyYiIiJ9ePbsGapXr15sOYNNesrS9OnTMXHiRNV7mUwGT09PPHv2DLa2tnqMjIiIiDSVmJgIDw8P2NjYaFTeYJMeNzc3AEBUVBSqVq2q2h8VFaVazNDNzQ3R0dFq52VmZiIuLk51fkEkEgkkEkm+/ba2tkx6iIiIKhhNu6YY7Dw9NWrUgJubG44dO6bal5iYiIsXL6oWEfT390dCQgKuXr2qKvPPP/9AoVCgdevW5R4zERERGS691vQkJyfj0aNHqvehoaG4fv06HB0d4enpic8++wxfffUV6tSpgxo1amDWrFlwd3dXjfBq0KAB3nrrLQwfPhzff/89MjIyMGbMGPTv358jt4iIiEiNXpOeK1euoGPHjqr32f1sBg8ejI0bN2LKlClISUnBiBEjkJCQgPbt2+PQoUOwsLBQnbNlyxaMGTMGnTt3homJCfr27YuVK1fqPFaFQgG5XK7z6xJpy8zMDGKxWN9hEBFVOAYzT48+JSYmws7ODjKZrMA+PXK5HKGhoVAoFHqIjig/e3t7uLm5cYoFIjJqxf3+zstgOzIbCkEQEBERAbFYDA8PD43mASAqK4IgIDU1VdWBP3cnfyIiKhqTnmJkZmYiNTUV7u7usLS01Hc4RJBKpQCA6OhoVKlShU1dREQaYrVFMbKysgAA5ubmeo6EKEd2Ap6RkaHnSIiIKg4mPRpi3wkyJPw+EhFpj0kPERERGQUmPURERGQUmPQQERGRUWDSQ0REREaBSQ8REREZBSY9WhIEAanyTL28tJk8u0OHDhgzZgzGjBkDOzs7ODs7Y9asWapreHt7Y/78+RgwYACsrKxQrVo1rFmzRu0a4eHheOedd2BtbQ1bW1t88MEHiIqK0unzJCIiKi+cnFBLrzKy0PDLv/Vy7zvzgmBprvl/sk2bNuHjjz/GpUuXcOXKFYwYMQKenp4YPnw4AODrr7/GF198gblz5+Lvv//G+PHjUbduXXTp0gUKhUKV8Jw8eRKZmZkYPXo0+vXrhxMnTpTRJyQiIio7THoqMQ8PDyxfvhwikQj16tXDzZs3sXz5clXS065dO0ybNg0AULduXZw9exbLly9Hly5dcOzYMdy8eROhoaHw8PAAAGzevBmNGjXC5cuX0bJlS719LiIiopJg0qMlqZkYd+YF6e3e2mjTpo3aJHb+/v749ttvVbNM+/v7q5X39/fHihUrAAB3796Fh4eHKuEBgIYNG8Le3h53795l0kNERBUOkx4tiUQirZqYiIiIyDCwI3MldvHiRbX3Fy5cQJ06dVQLVF64cCHf8QYNGgAAGjRogGfPnuHZs2eq43fu3EFCQgIaNmxYxpETERHpHpOeSiw8PBwTJ07E/fv38dtvv2HVqlUYP3686vjZs2exdOlSPHjwAGvWrMGOHTtUxwMDA+Hj44Pg4GCEhITg0qVLGDRoEAICAtCiRQt9fSQiIqISYztNJTZo0CC8evUKrVq1glgsxvjx4zFixAjV8UmTJuHKlSuYO3cubG1tsWzZMgQFKfsriUQi7N27F2PHjsWbb74JExMTvPXWW1i1apW+Pg4REVGpMOmpxMzMzLBixQqsW7euwOO2trb4/fffCz3f09MTe/fuLavwiIiIyhWbt4iIiMgoMOkhIiIio8DmrUqquFmTnz59Wi5xEBERGQrW9BAREZFRYNJDRERERoFJDxERERkFJj1ERERkFJj0EBERkVFg0kNERERGgUkPERERGQUmPURERGQUmPQQERGRUWDSoy1BAOQp+nkJglah7ty5Ez4+PpBKpXByckJgYCBSUlJw+fJldOnSBc7OzrCzs0NAQABCQkLUzk1ISMDIkSPh6uoKCwsLNG7cGPv379flkyQiIipXXIZCWxmpwEJ3/dz7i/8AcyuNikZERGDAgAFYunQp+vTpg6SkJJw+fRqCICApKQmDBw/GqlWrIAgCvv32W3Tv3h0PHz6EjY0NFAoFunXrhqSkJPz666+oVasW7ty5A7FYXMYfkIiIqOww6amkIiIikJmZiXfffRdeXl4AAB8fHwBAp06d1MquX78e9vb2OHnyJHr27ImjR4/i0qVLuHv3LurWrQsAqFmzZvl+ACIiIh1j0qMtM0tljYu+7q2hJk2aoHPnzvDx8UFQUBC6du2K9957Dw4ODoiKisLMmTNx4sQJREdHIysrC6mpqQgPDwc
AXL9+HdWrV1clPERERJUBkx5tiUQaNzHpk1gsxpEjR3Du3DkcPnwYq1atwowZM3Dx4kWMGjUKsbGx+O677+Dl5QWJRAJ/f3/I5XIAgFQq1XP0REREuseOzJWYSCRCu3btMHfuXFy7dg3m5ubYvXs3zp49i3HjxqF79+5o1KgRJBIJYmJiVOf5+vri+fPnePDggR6jJyIi0i3W9FRSFy9exLFjx9C1a1dUqVIFFy9exMuXL9GgQQPUqVMHv/zyC1q0aIHExER8/vnnarU7AQEBePPNN9G3b18sW7YMtWvXxr179yASifDWW2/p8VMRERGVHGt6KilbW1ucOnUK3bt3R926dTFz5kx8++236NatG3766SfEx8ejWbNmGDhwIMaNG4cqVaqonf/HH3+gZcuWGDBgABo2bIgpU6YgKytLT5+GiIio9ESCoOXkL5VQYmIi7OzsIJPJYGtrq3YsLS0NoaGhqFGjBiwsLPQUIZE6fi+JiIr+/V0Q1vQQERGRUWDSQ0REREaBSQ8REREZBSY9REREZBSY9BAREZFRYNJDRERERoFJDxERERkFJj1ERERkFJj0EBERkVFg0kOVwptvvomtW7fqO4xSk8vl8Pb2xpUrV/QdChFRpcOkpxKLjIzE+PHjUbt2bVhYWMDV1RXt2rXDunXrkJqaqirn7e0NkUgEkUgEKysrNGvWDDt27Cjy2tnnbNu2Ld+xRo0aQSQSYePGjVrdIzExETNmzED9+vVhYWEBNzc3BAYGYteuXShqtZR9+/YhKioK/fv31/DJ6IYgCPjyyy9RtWpVSKVSBAYG4uHDh0WeM2fOHNVzyH7Vr19fddzc3ByTJ0/G1KlTyzp8IiKjw6Snknry5An8/Pxw+PBhLFy4ENeuXcP58+cxZcoU7N+/H0ePHlUrP2/ePERERODatWto2bIl+vXrh3PnzhV5Dw8PD2zYsEFt34ULFxAZGQkrK6t85Yu6R0JCAtq2bYvNmzdj+vTpCAkJwalTp9CvXz9MmTIFMpms0DhWrlyJoUOHwsSkfL/OS5cuxcqVK/H999/j4sWLsLKyQlBQENLS0oo8r1GjRoiIiFC9zpw5o3Y8ODgYZ86cwe3bt8syfCIi4yOQIJPJBACCTCbLd+zVq1fCnTt3hFevXgmCIAgKhUJIkafo5aVQKDT+TEFBQUL16tWF5OTkAo/nvpaXl5ewfPly1fuMjAzB0tJSmDZtWqHX9/LyEqZNmyZIJBIhPDxctX/48OHC2LFjBTs7O2HDhg0a32PUqFGClZWV8OLFi3z3SkpKEjIyMgqMIzo6WhCJRMKtW7fU9gMQvv/+e6FHjx6CVCoV6tevL5w7d054+PChEBAQIFhaWgr+/v7Co0ePCv2MRVEoFIKbm5vw9ddfq/YlJCQIEolE+O233wo9b/bs2UKTJk2KvX7Hjh2FmTNnFno87/eSiMgYFfX7uyCmes65KpxXma/Qemtrvdz74ocXYWlmWWy52NhYVQ1PQTUuACASiQo939TUFGZmZpDL5UXex9XVFUFBQdi0aRNmzpyJ1NRUbN++HSdPnsTmzZuLPDf3PRQKBbZt24bg4GC4u7vnK2ttbV3odc6cOQNLS0s0aNAg37H58+dj2bJlWLZsGaZOnYoPP/wQNWvWxPTp0+Hp6Ylhw4ZhzJgxOHjwIADg9OnT6NatW5Fx//DDDwgODkZoaCgiIyMRGBioOmZnZ4fWrVvj/PnzRTa1PXz4EO7u7rCwsIC/vz8WLVoET09PtTKtWrXC6dOni4yFiIi0w6SnEnr06BEEQUC9evXU9js7O6uaXkaPHo0lS5bkO1cul+Pbb7+FTCZDp06dir3XsGHDMGnSJMyYMQM7d+5ErVq10LRp0yLPyXuPmJgYxMfHq/Vt0VRYWBhcXV0LbNoaOnQoPvjgAwDA1KlT4e/vj1mzZiEoKAgAMH78eAwdOlRVvkWLFrh+/XqR93N1dQWg7C+V+33u49nHCtK6dWts3LgR9erVQ0REBObOnYs33ngDt27dgo2Njaqcu7s7wsLCioyFiIi0w6RHS1JTKS5+eFFv9y6NS5cuQaFQIDg4GOnp6WrHpk6dipkzZyItLQ3W1tZYvHgxevTogYULF2LhwoWqcnfu3FGrlejRowdGjhyJU6dO4eeff8awYcMKvX9h94iKiirxZ3r16hUsLCwKPObr66vazk5OfHx81PalpaUhMTERtra2kEqlqF27dolj0UTumiRfX1+0bt0aXl5e+P333/Hxxx+rjkmlUrXO5kREVHpMerQkEok0amLSp9q1a0MkEuH+/ftq+2vWrAlA+Qs1r88//xxDhgyBtbU1XF1dVc1fn376qaq2BEC+5idTU1MMHDgQs2fPxsWLF7F79+5C4yrsHi4uLrC3t8e9e/e0/qzOzs6Ij48v8JiZmZlqO/teBe1TKBQAtGvecnNzAwBERUWhatWqquNRUVHF1nTlZm9vj7p16+LRo0dq++Pi4uDi4qLxdYiIqHhMeiohJycndOnSBatXr8bYsWML7deTm7Ozc4G1HI6OjnB0dCzy3GHDhuGbb75Bv3794ODgoPU9TExM0L9/f/zyyy+YPXt2vsQqOTkZFhYWMDXN/3X18/NDZGQk4uPji7y3JrRp3qpRowbc3Nxw7NgxVZKTmJiIixcvYtSoURrfMzk5GY8fP8bAgQPV9t+6dQt+fn5axU9EREXjkPVKau3atcjMzESLFi2wfft23L17F/fv38evv/6Ke/fuQSwW6+xeDRo0QExMTL7h69pYsGABPDw80Lp1a2zevBl37tzBw4cP8fPPP8PPzw/JyckFnufn5wdnZ2ecPXu2xPfOlt28VdQru9+NSCTCZ599hq+++gr79u3DzZs3MWjQILi7u6N3796qa3bu3BmrV69WvZ88eTJOnjyJp0+f4ty5c+jTpw/EYjEGDBigFsvp06fRtWvXUn8mIiLKwZqeSqpWrVq4du0aFi5ciOnTp+P58+eQSCRo2LAhJk+ejP/97386vZ+Tk1Opznd0dMSFCxewePFifPXVVwgLC4ODgwN8fHzw9ddfw87OrsDzxGIxhg4dii1btqBnz56likFbU6ZMQUpKCkaMGIGEhAS0b98ehw4dUutj9PjxY8TExKjeP3/+HAMGDEBsbCxcXFzQvn17XLhwQa0p6/z585DJZHjvvffK9fMQEVV2IkEoYqpbI5GYmAg7OzvIZDLY2tqqHUtLS0NoaChq1KhRaIdZ0q/IyEg0atQIISEh8PLy0nc4pdavXz80adIEX3zxRaFl+L0kIir693dB2LxFFZ6bmxt++uknhIeH6zuUUpPL5fDx8cGECRP0HQoRUaXD5i2qFHL3o6nIzM3NMXPmTH2HQURUKbGmh4iIiIyCQSc9WVlZmDVrFmrUqAGpVIpatWph/vz5aituCyVY6ZqIiIiMj0EnPUuWLMG6deuwevVq3L17F0uWLMHSpUuxatUqVZmSrnRNRERExsWg+/ScO3cO77zzDnr06AEA8Pb2xm+//YZLly4BUNbyrFixAjNnzsQ777wDANi8eTNcXV2xZ8+eQh
d9TE9PV1uGITExsYw/CREREembQdf0tG3bFseOHcODBw8AAP/++y/OnDmjWiqguJWuC7No0SLY2dmpXh4eHmX7QYiIiEjvDLqmZ9q0aUhMTET9+vUhFouRlZWFBQsWIDg4GEDJV7qePn06Jk6cqHqfmJjIxIeIiKiSM+ik5/fff8eWLVuwdetWNGrUCNevX8dnn30Gd3d3DB48uMTXlUgkkEgkOoyUiIiIDJ1BN299/vnnmDZtGvr37w8fHx8MHDgQEyZMwKJFiwBAbaXr3KKiolTHyDi8+eab2Lp1q77D0Ik2bdrgjz/+0HcYRESVjkEnPampqTAxUQ9RLBZDoVAAUF/pOlv2Stf+/v7lGqshioyMxPjx41G7dm1YWFjA1dUV7dq1w7p165Camqoq5+3tDZFIBJFIBCsrKzRr1gw7duwo8trZ52zbti3fsUaNGkEkEmHjxo1a3SMxMREzZsxA/fr1YWFhATc3NwQGBmLXrl0oarWUffv2ISoqqtCO62Vl165d6Nq1K5ycnCASiYpdoT3bjh07VJ/Rx8cHf/31l9rxmTNnYtq0aarvORER6YZBJz29evXCggULcODAATx9+hS7d+/GsmXL0KdPHwCar3RtjJ48eQI/Pz8cPnwYCxcuxLVr13D+/HlMmTIF+/fvx9GjR9XKz5s3DxEREbh27RpatmyJfv364dy5c0Xew8PDI9/K6hcuXEBkZCSsrKzylS/qHgkJCWjbti02b96M6dOnIyQkBKdOnUK/fv0wZcoUyGSyQuNYuXIlhg4dmi9BLmspKSlo3749lixZovE5586dw4ABA/Dxxx/j2rVr6N27N3r37o1bt26pynTr1g1JSUk4ePBgWYRNRGS8BAOWmJgojB8/XvD09BQsLCyEmjVrCjNmzBDS09NVZRQKhTBr1izB1dVVkEgkQufOnYX79+9rdR+ZTCYAEGQyWb5jr169Eu7cuSO8evVKdb+slBS9vBQKhcafKSgoSKhevbqQnJxc4PHc1/Ly8hKWL1+uep+RkSFYWloK06ZNK/T6Xl5ewrRp0wSJRCKEh4er9g8fPlwYO3asYGdnJ2zYsEHje4waNUqwsrISXrx4ke9eSUlJQkZGRoFxREdHCyKRSLh165bafgDC999/L/To0UOQSqVC/fr1hXPnzgkPHz4UAgICBEtLS8Hf31949OhRoZ9RU6GhoQIA4dq1a8WW/eCDD4QePXqo7WvdurUwcuRItX1Dhw4VPvroo0Kvk/d7SURkjIr6/V0Qg+7IbGNjgxUrVmDFihWFlhGJRJg3bx7mzZtXLjEJr17hfrPm5XKvvOqFXIXI0rLYcrGxsaoanoJqXADlcyuMqakpzMzMIJfLi7yPq6srgoKCsGnTJsycOROpqanYvn07Tp48ic2bNxd5bu57KBQKbNu2DcHBwXB3d89X1trautDrnDlzBpaWlmjQoEG+Y/Pnz8eyZcuwbNkyTJ06FR9++CFq1qyJ6dOnw9PTE8OGDcOYMWNUNSqnT59WTYdQmB9++EE1erAkzp8/rzZyEACCgoKwZ88etX2tWrXC4sWLS3wfIiLKz6CTHiqZR48eQRAE1KtXT22/s7Ozaqbq0aNHF9gsI5fL8e2330Imk6FTp07F3mvYsGGYNGkSZsyYgZ07d6JWrVpo2rRpkefkvUdMTAzi4+NRv359zT/ka2FhYXB1dS2waWvo0KH44IMPAABTp06Fv78/Zs2ahaCgIADA+PHjMXToUFX5Fi1aFNsvJ+/0CNqKjIzUaIoFd3d3PHv2DAqFotyb7YiIKismPVoSSaWoF3JVb/cujUuXLkGhUCA4OFhtRmpAmRTMnDkTaWlpsLa2xuLFi9GjRw8sXLgQCxcuVJW7c+cOPD09Ve979OiBkSNH4tSpU/j5558xbNiwQu9f2D3yjr7TxqtXr2BhYVHgMV9fX9V2dqLh4+Ojti8tLQ2JiYmwtbWFVCpF7dq1SxyLLkmlUigUCqSnp0Nayv/uRESkxKRHSyKRSKMmJn2qXbs2RCIR7t+/r7a/Zs2aAFDgL9HPP/8cQ4YMgbW1NVxdXVXNX59++qmqtgRAvuYnU1NTDBw4ELNnz8bFixexe/fuQuMq7B4uLi6wt7fHvXv3tP6szs7OiI+PL/CYmZmZajv7XgXtyx4lVR7NW25ubhpNsRAXFwcrKysmPEREOsSkpxJycnJCly5dsHr1aowdO7bQfj25OTs7F1jL4ejoCEdHxyLPHTZsGL755hv069cPDg4OWt/DxMQE/fv3xy+//ILZs2fnS6ySk5NhYWEBU9P8X1c/Pz9ERkYiPj6+yHtrojyat/z9/XHs2DF89tlnqn1HjhzJN8XCrVu34OfnV6p7ERGROiY9ldTatWvRrl07tGjRAnPmzIGvry9MTExw+fJl3Lt3D82b664zdoMGDRATEwPLUtSALViwACdOnEDr1q2xYMECtGjRAmZmZjh9+jQWLVqEy5cvw97ePt95fn5+cHZ2xtmzZ9GzZ89SfApo3bwVFxeH8PBw/PfffwCgqllzc3NT1dwMGjQI1apVU02oOX78eAQEBODbb79Fjx49sG3bNly5cgXr169Xu/bp06fRtWvXUn0eIiJSx6SnkqpVqxauXbuGhQsXYvr06Xj+/DkkEgkaNmyIyZMn43//+59O7+fk5FSq8x0dHXHhwgUsXrwYX331FcLCwuDg4AAfHx98/fXXsLOzK/A8sViMoUOHYsuWLaVOerS1b98+tY7Q2ZMjzp49G3PmzAEAhIeHq3VEbtu2LbZu3YqZM2fiiy++QJ06dbBnzx40btxYVebFixc4d+4cfv311/L5IERERkIkCEVMdWskEhMTYWdnB5lMBltbW7VjaWlpCA0NRY0aNQrtMEv6FRkZiUaNGiEkJAReXl76DqfUpk6divj4+Hy1P7nxe0lEVPTv74JwLCxVeG5ubvjpp58QHh6u71B0okqVKpg/f76+wyAiqnTYvEWVQmVadmTSpEn6DoGIqFJiTQ8REREZBSY9REREZBSY9BARERWB430qD/bpISIiKsSWi2H4cu9t+FSzQw+fqmhUzRaPo5MRFpuKt5u6w7e6vb5DJC0w6SGiiiHiBnBhLdB+AuBSr/jyRMUQBAGCAJiYiAo8fjUsHrP33kaWQsD1Zwm4/ixB7fiRu1H4Z1IHiHOd/zIpHaExKWjp7aBa6oYMB5MeIjJ8kTeBTb2AtATl+z7f6zUcqtjCY1Ox9VI4dlx5BhsLU6z+sBkaV1OfADU2OR1jtoYgUyGgW2M3tK3lhAM3I/Ai4RVqu1jjSlg8wmJT8c+9aHRp6Iq0jCz8dCYUa44/Qqo8CxMC62J8YB09fUIqDJMeIjJs0feAzb1zEp6ws/qMhiooQRBw+mEM/u9MKE49eKnaH5siR99157C4rw/6+FUHAKRlZGH8tuuIkKWhlosVvn6/Cawlphjo7606b9HBu/jh5BP8fCYUHeq5YOBPF3H5ac7ix8uPPkBDd1t0aVi69fpIt9iRmSqFN998E1u3btV3GKUml8vh7e2NK1eu6DsUwxD7GNj8NpAaA7j5A
iIxkBAOJDzTd2RUgdz5LxGDfr6EQT9fwqkHLyESAQF1XbDmw2boWM8F6ZkKTNj+Lyb+fh3DN19B03mHceZRDKRmYqz7qDmsJfnrBwb7e0NsIsL5J7EY9WsILj+Nh43EFN/1b4pB/sqZ4Sdsv45H0ckQBAE7rz7Huccx5f3RKQ8mPZVYZGQkxo8fj9q1a8PCwgKurq5o164d1q1bh9TUVFU5b29viEQiiEQiWFlZoVmzZtixY0eR184+Z9u2bfmONWrUCCKRCBs3btTqHomJiZgxYwbq168PCwsLuLm5ITAwELt27Spy9MS+ffsQFRWlWvuqvMyZMwf169eHlZUVHBwcEBgYiIsXLxZ73po1a+Dt7Q0LCwu0bt0aly5dUh0zNzfH5MmTMXXq1LIMvWKIDwM2vQ0kRwGujYFBewH3pspjrO0hDUTIXmHyjn/RY9VpnH4YA3OxCYa1q4GTkzti07BW6OFbFT8NbolxnZQLDe8KeYEjd6KQlqGAu50FVn/oh7quNgVe291eim6NlQsLH70bBQD45oMmeKdpNczq2RCtajgiOT0TIzZfwVcH7mLyjn8xcvNVZGQpyufDU4GY9FRST548gZ+fHw4fPoyFCxfi2rVrOH/+PKZMmYL9+/fj6NGjauXnzZuHiIgIXLt2DS1btkS/fv1w7ty5Iu/h4eGBDRs2qO27cOECIiMjYWVlla98UfdISEhA27ZtsXnzZkyfPh0hISE4deoU+vXrhylTpkAmkxUax8qVKzF06FC1hT3LQ926dbF69WrcvHkTZ86cgbe3N7p27YqXL18Wes727dsxceJEzJ49GyEhIWjSpAmCgoIQHR2tKhMcHIwzZ87g9u3b5fExDJPshbIPT+JzwLkeMHAPYOkIeLVTHmfSQ4UQBAGXn8Zh3G/X8ObS49h59TkEAejVxB3HJgXgy14N4elkqSpvYiLCxK718H+DWqBDPRdM7FIXf417A2endULnBkU3TX3cvoZqe/gbNRDUSJkEmYlNsDa4GarZS/EkJgU/nQkFACSlZ+LG88L/LaNyIJAgk8kEAIJMJst37NWrV8KdO3eEV69eCYIgCAqFQpCnZerlpVAoNP5MQUFBQvXq1YXk5OQCj+e+lpeXl7B8+XLV+4yMDMHS0lKYNm1aodf38vISpk2bJkgkEiE8PFy1f/jw4cLYsWMFOzs7YcOGDRrfY9SoUYKVlZXw4sWLfPdKSkoSMjIyCowjOjpaEIlEwq1bt9T2AxC+//57oUePHoJUKhXq168vnDt3Tnj48KEQEBAgWFpaCv7+/sKjR48K/Yzayv4eHT16tNAyrVq1EkaPHq16n5WVJbi7uwuLFi1SK9exY0dh5syZhV4n7/eyUkmMFISVzQRhtq0grGgiCLL/co7dO6jc/52f3sIjw5SUliFsPv9U6LrspOA1db/q9f7354SQsLgyu++Sg3eF6btuCPLMrHzHbj5PEOrN/Evwmrpf8Jt3WPCaul9Y/c/DMovFGBX1+7sg7MispUy5AuvHn9TLvUd8FwAzibjYcrGxsaoanoJqXAAUOZTS1NQUZmZmkMvlRd7H1dUVQUFB2LRpE2bOnInU1FRs374dJ0+exObNm4s8N/c9FAoFtm3bhuDgYLi7u+cra21tXeh1zpw5A0tLSzRo0CDfsfnz52PZsmVYtmwZpk6dig8//BA1a9bE9OnT4enpiWHDhmHMmDE4ePAgAOD06dPo1q1bkXH/8MMPCA4OzrdfLpdj/fr1sLOzQ5MmTQo8Vy6X4+rVq5g+fbpqn4mJCQIDA3H+/Hm1sq1atcLp06eLjKVSSokFNr8DxD4C7DyBwX8CtlVzjnu2ASAC4h4DSZGAjZveQiXDIAgCdl97gQUH7iI2RflvloWZCXo3rYaP2njlG5Wla1Peql/oscbV7LB3dHv8l/AKYbEpmPPnHVx4EovRHWuXaUxUOCY9ldCjR48gCALq1VOfy8TZ2RlpaWkAgNGjR2PJkiX5zpXL5fj2228hk8nQqVOnYu81bNgwTJo0CTNmzMDOnTtRq1YtNG3atMhz8t4jJiYG8fHxqF+/8H88ChMWFgZXV9cCm7aGDh2KDz74AAAwdepU+Pv7Y9asWQgKCgIAjB8/HkOHDlWVb9GiBa5fv17k/Vxd1au79+/fj/79+yM1NRVVq1bFkSNH4OzsXOC5MTExyMrKyncNV1dX3Lt3T22fu7s7wsLCioyl0nkVD/zyDvDyLmDjDgzeC9h7qJeR2gNuPkDkDeDpGcDnPb2ESmVHEASExqTgWngCbKVmaFPTETYWZgWWffIyGTP33MK5x7EAAG8nSwzy90bf5tVhJy34nPJWz80G9dxscD8yCQBw5Wk8MrIUMBOzd4k+MOnRkqm5CUZ8F6C3e5fGpUuXoFAoEBwcjPT0dLVjU6dOxcyZM5GWlgZra2ssXrwYPXr0wMKFC7Fw4UJVuTt37sDT01P1vkePHhg5ciROnTqFn3/+GcOGDSv0/oXdIyoqqsSf6dWrV7CwsCjwmK+vr2o7O9Hw8fFR25eWlobExETY2tpCKpWidm3t/gLr2LEjrl+/jpiYGPz444/44IMPcPHiRVSpUqUEnyaHVCpV62xe6aUlAr/2Vc7HY1UFGLwPcKxZcFnv9sqkJ+wck55KIDNLgbsRSbj0NA6XQ+NwJSwOMck5tcxiExGaetijfW1ntK/jjKYe9lAIAtadeIy1xx9DnqWAxNQE4zrXwfA3asLc1DCTiTpVrOFoZY6410Pk41Pl2DWqHVxsJPoOzagw6dGSSCTSqIlJn2rXrg2RSIT79++r7a9ZU/lLRCqV5jvn888/x5AhQ2BtbQ1XV1dV89enn36qqi0BkK/5ydTUFAMHDsTs2bNx8eJF7N69u9C4CruHi4sL7O3t89V2aMLZ2Rnx8fEFHjMzy/lLL/teBe1TKJSjKUrSvGVlZYXatWujdu3aaNOmDerUqYOffvpJrQkrd6xisThfkhcVFQU3N/Vmmri4OLi4uBQZS6UhTwG2fgC8uApIHZWjtJyLmNTNq61yZmZ2Zi43giAgOT0TcSlyxCTLEZucjtgU5c+YZDliU+SIS0lHbLIcSWmZMDc1gcTUBBZmYliYvf5pqtyWmoshMRXDTCzCvcgkhITFI0WepXY/c1MTNKluh5hkOUJjUnA1LB5Xw+Lx3bGHsJaYwsbCFBEyZa31m3VdMP+dRvByKrgp31CYmIjQuoYjDt6KVHVm3nv9BT55o5DknsoEk55KyMnJCV26dMHq1asxduzYQvv15Obs7FxgLYejoyMcHR2LPHfYsGH45ptv0K9fPzg4OGh9DxMTE/Tv3x+//PILZs+enS+xSk5OhoWFBUxN839d/fz8EBkZifj4+CLvrYmSNG/lpVAo8tWiZTM3N0fz5s1x7Ngx9O7dW1X+2LFjGDNmjFrZW7duwc/PT+PYK6yMV8Bv/YHw84DEDhi4G3BtWPQ5nm2VP1/eA1JiAKuCmxONTWaWAumZCqRlZCE9U/H6lYW0DAXSc+3LOf76WGYW0jPUj6VlZCE+VY7Y1wlOTIoc8syyG2pta2GK
Ft6OaOHtgFbejvCpbgeJqfKPy+fxqTjzMAanH8Xg3KMYxKdmIDk9Ey42EnzZsyF6+latMMs9dG7gioO3ImEjMUVSeiYO3Ixg0lPOmPRUUmvXrkW7du3QokULzJkzB76+vjAxMcHly5dx7949NG/eXGf3atCgAWJiYmBpaVl84UIsWLAAJ06cQOvWrbFgwQK0aNECZmZmOH36NBYtWoTLly/D3t4+33l+fn5wdnbG2bNn0bNnz1J8CmjVvJWSkoIFCxbg7bffRtWqVRETE4M1a9bgxYsXeP/991XlOnfujD59+qiSmokTJ2Lw4MFo0aIFWrVqhRUrViAlJUWtbxGgrHWaP39+qT6PwctMB7Z/BISeAsytgYG7cubhKYqVE1ClIRB9R9nE1fDtMg/VUPxw8jF2hbxQJip5kpgsRdmvBG5lLoaTtQSOVuZwtjaHk5UETtbmcLKWqN7bWJhCnqXAK3kW0jKykJYdY4YyyXqVkaWK2cvJEi29HVHP1abQ9a+qO1iifytP9G/lCYVCwJ2IRITFpqJ9HWeD6bejqXf9qqFhVVvYSk3xxtLjuBaegP8SXsHdPn/te15ZCgHpmVmwNOev7dLg06ukatWqhWvXrmHhwoWYPn06nj9/DolEgoYNG2Ly5Mn43//+p9P7OTk5lep8R0dHXLhwAYsXL8ZXX32FsLAwODg4wMfHB19//TXs7AoegSEWizF06FBs2bKl1EmPNsRiMe7du4dNmzYhJiYGTk5OaNmyJU6fPo1GjRqpyj1+/BgxMTmzsPbr1w8vX77El19+icjISDRt2hSHDh1Sq0E6f/48ZDIZ3nuvEvdXycoAdgwFHh0FzCyB4B1A9Raan+/V7nXSc9aokp6Y5HTcj0oqtpy52AQSMxNITMWQmCq3LUzFr/cpm5skpjnHVe9zlbO3zJPYWEkgNddv076JiQiNq9mV+YissmJiIkJDd1sAQEsvR1x6GocfTj5GcBsvtUkQBUHA+SexCAmLR5uaTmjh7YiZe25hx5Vn2D+uPeq72errI1R4IkEoYqpbI5GYmAg7OzvIZDLY2qp/mdLS0hAaGooaNWoU2mGW9CsyMhKNGjVCSEgIvLy89B1OqfXr1w9NmjTBF198UWiZCv29zMoEdn0C3N4NiCVA8O9AzQ7aXePWLmDnUMDVBxh1pkzCNESPXyYjUpamSlgsshObXMmMudik0FoTMhwbz4Zizp93AAAiEXBg7BuqhGhXyHNM/P1fAICLjQT7x7ZH64XHAACftK+BmT2LaQI2IkX9/i4Ia3qownNzc8NPP/2E8PDwCp/0yOVy+Pj4YMKECfoOpWwoFMDe0cqEx8QM6Per9gkPkDMzc9Qt5VB3aen6c1UUtVysUcul8HmrqOJ4r4UHroYn4OKTWEQnpePInShV0vP37UhVuZdJ6Zi//47qfXRSwX0GSTMlSnqOHTuGY8eOITo6WjXyJdvPP/+sk8CItJHdMbiiMzc3x8yZM/UdRtkQBODABODGNuXCoe9vAOp2Ldm1bFwBpzpA7EMg/AJQr+hRd0SGxlpiilUD/LD1Yji+2H0TZx/FYEyn2lAIAs49Us475GxtjphkOfbfiFCdd/s/LmNRGlpPaDB37lx07doVx44dU00ql/tFRJSPIACHpgFXNwIiE+Dd9UCDXqW7ptfrUVxPjad5iyqf9rWVow8vPY1D529PoM6Mg0hKz4SDpRlGvllLVS57gNqTmBSkyjP1EWqloHVNz/fff4+NGzdi4MCBZREPEVU2ggAcnQ1c/F75/p01uplU0Ls9ELKJ8/VQhebpZAlPR0uEx6XiaWzOhKTtajvjzbouWPDXXQDAvLcb4btjj5Sd2SOT4OdpHE26uqZ1TY9cLkfbtm3LIhaDxv7eZEjyNisbtBOLgbPfKbd7Lgeafqib62b364n4F0gvfkQTkaFq6mGv2na0MgcAdGnoirqu1uju44Y367qgX0tPVZ+fmy/YxFVSWtf0fPLJJ9i6dStmzZpVFvEYHDMzM4hEIrx8+RIuLi4VZhIsqpwEQYBcLsfLly9hYmICc3NzfYdUtNPLgJOLldtvLQZaFL5MidbsqgEO3kD8UyD8IlAnUHfXJipHwa098eeN//BRay98FlgHIeEJCGxQBSKRCGuDc+ZUa+HlgFMPXuLCk1gM8vfWX8AVmNZJT1paGtavX4+jR4/C19dXbVp/AFi2bJnOgjMEYrEY1atXx/Pnz/H06VN9h0MEALC0tISnp2eBC60ajPNrgWNzldudZwNtRun+Hl7tlElP2BkmPVRhta7phJtzgmBlLoZIJEKXhgXP/N6uthOWHQHOP45FQqocZx7FILCBKyzMDHtpJEOiddJz48YN1Srat27dUjtWWWtBrK2tUadOHWRkZOg7FCKIxWKYmpoa9v9vV34G/n69/ljAVOCNiWVzH692wPUtwFP266GKzVpS/K9j3+r2sDQXIz41Ax2+OYGE1AyMeLMmvujeoBwirBy0TnqOHz9eFnEYPLFYDLGY2TRRsa5vBfa/nmeo3XigQ/7FV3XG+3W/nv9CAHkqYF7ypVCIDJ2Z2AStajjixP2XSEhV/hG+K+Q5kx4tlKpu/Pnz53j+/LmuYiGiiu7mTuXkgwDQ+lMgcG7OWNuyYO8F2FYDFJnA80tldx8iA9HIXX3WYVuLirX+mL5pnfQoFArMmzcPdnZ28PLygpeXF+zt7TF//vyKNaKEiHTr7p/ArhGAoACaDVZ2XC7rJjiRKGcUF5u4yAh4OVmpvbex4MIK2tD6ac2YMQM//fQTFi9ejHbtlP/YnDlzBnPmzEFaWhoWLFig8yCJyMA9OKxcQFTIAnz7Az1XlH3Ck827HXDzd87XQ0bBy1G9CdeaSY9WtH5amzZtwv/93//h7bdzVjb29fVFtWrV8L///Y9JD5GxUWQBR+cAigygUR/l5IPlOarMq73y5/MrQEYaYFbBFmAl0oK3s3pNT0YW55DThtb/MsXFxaF+/fr59tevXx9xcXE6CYqIKhATMTBwF9BmNPDuj4C4nP/ydKoFWFUBstKBF1fL995E5ayKjUTtfVIal6TQhtZJT5MmTbB69ep8+1evXo0mTZroJCgiqmBs3IC3FgJiPXSqFIlyRnGxiYsqubxTVSSncyoVbWj9J9nSpUvRo0cPHD16FP7+/gCA8+fP49mzZ/jrr790HiARUbG82gG3dzPpIaOTzJoerWhd0xMQEIAHDx6gT58+SEhIQEJCAt59913cv38fb7zxRlnESERUNO/X/XqeXQKy+JcvVW5L+vqotpPTM7k2pBZK1Pju7u7ODstEZDic6wFSR+BVHPDfNcCjlb4jIioz/Vp6IqiRG5rOO4KMLAHpmQouRaEhjZKeGzduoHHjxjAxMcGNGzeKLOvr66uTwIiINGZiAni1Be7tB56eYdJDlZ6thRlEIkAQgGEbL2PlAD84W0uKP9HIaZT0NG3aFJGRkahSpQqaNm0KkUhUYHWaSCRCVlaWzoMkIiqWd3tl0hN2ruzW+iIyECYmIlibmyIpPRPnHsfiu6MPMb93Y32HZfA0SnpCQ0P
h4uKi2iYiMjhebZU/wy8AWZnlP3SeqJxZWyiTHgCIkKXpOZqKQaN/Fby8vFTbYWFhaNu2LUxN1U/NzMzEuXPn1MoSEZUb18aAxA5IlwGRN4BqzfQdEVGZysjKWfrJ0YprcGlC69FbHTt2LHASQplMho4dO+okKCIirZmIAS/lNBoIO6ffWIjKQUyyXLWdKmfXEk1onfQIgpBvciQAiI2NhZWVVQFnEBGVEy9OUkjGKS5FXnwh0nzI+rvvvgtA2Vl5yJAhkEhyeolnZWXhxo0baNu2re4jJCLSlCrpOQcoFOW7BhiRHjHp0YzGSY+dnR0AZU2PjY0NpFKp6pi5uTnatGmD4cOH6z5CIiJNVW0CmFsDaQlA9G3AzafYU4gqqjUfNsPorSEAgFgmPRrROOnZsGEDAMDb2xuTJ09mUxYRGR6xKeDRGnh8TFnbw6SHKrEevlXRzKsT/Bf9g5dJ6Ri28TJMRCKsH9gcJib5u6FQCfr0zJ49mwkPERmu7MVHn57RbxxE5cDRyly1/c+9aBy9G4WnsSl6jMiwlWgii507d+L3339HeHg45HL1KrWQkBCdBEZEVCK5+/UIgnIVdqJKSmKaf/mJJy9TUNPFWg/RGD6ta3pWrlyJoUOHwtXVFdeuXUOrVq3g5OSEJ0+eoFu3bmURIxGR5tybAaZSIDUGiHmg72iIyl1oDGt6CqN10rN27VqsX78eq1atgrm5OaZMmYIjR45g3LhxkMlkZREjEZHmTM0Bj5bKbTZxkRGo5aLscmIjUTbePIlJ1mc4Bk3rpCc8PFw1NF0qlSIpKQkAMHDgQPz222+6jY6IqCQ4Xw8ZkZ+HtMTqD/1Ua289fsmansJonfS4ubmpZmT29PTEhQsXACjX5CpoEVIionKXnfQ8Pavs10NUiXk5WaGnrztqvq7xYfNW4bROejp16oR9+/YBAIYOHYoJEyagS5cu6NevH/r06aPzAImItFa9BSA2B5Ijgbgn+o6GqFy42VoAAGKT06FQMNkviNajt9avXw+FQrnI2ejRo+Hk5IRz587h7bffxsiRI3UeIBGR1sykQLUWQPg5ZROXUy19R0RU5uwtlcPXFQKQlJYJO0suQpqX1jU9JiYmaius9+/fHytXrsTYsWNhbm5exJkl8+LFC3z00UdwcnKCVCqFj48Prly5ojouCAK+/PJLVK1aFVKpFIGBgXj48KHO4yCiCsbr9bI4T9mvh4yDuakJrF93Zo5L5QzNBdGopufGjRsaX9DX17fEweQVHx+Pdu3aoWPHjjh48CBcXFzw8OFDODg4qMosXboUK1euxKZNm1CjRg3MmjULQUFBuHPnDiwsLHQWCxFVMN7tgNPfcMV1Mir2lmZITs9EfKocNcCJhPPSKOlp2rQpRCJRsR2VRSIRsrJ0t7z9kiVL4OHhoVoCAwBq1Kih2hYEAStWrMDMmTPxzjvvAAA2b94MV1dX7NmzB/379y/wuunp6UhPT1e9T0xM1FnMRGQgPFoDJqaALBxICAfsPfUdEVGZc7A0x/P4V4jnWlwF0qh5KzQ0FE+ePEFoaGiRrydPdNthcN++fWjRogXef/99VKlSBX5+fvjxxx/V4oqMjERgYKBqn52dHVq3bo3z588Xet1FixbBzs5O9fLw8NBp3ERkAMytgKpNldts4iIj4fB6WYr41Aw9R2KYNKrp8fLyKus4CvTkyROsW7cOEydOxBdffIHLly9j3LhxMDc3x+DBgxEZGQkAcHV1VTvP1dVVdawg06dPx8SJE1XvExMTmfgQVUbe7YAXV4CwM0DTAfqOhqjMObzuvMyanoKVaO2tx48fY8WKFbh79y4AoGHDhhg/fjxq1dLtCAmFQoEWLVpg4cKFAAA/Pz/cunUL33//PQYPHlzi60okEkgkEl2FSUSGyqs9cPY79usho+FgmV3Tw6SnIFqP3vr777/RsGFDXLp0Cb6+vvD19cXFixfRqFEjHDlyRKfBVa1aFQ0bNlTb16BBA4SHhwNQTpQIAFFRUWploqKiVMeIyIh5tgZEJsq5ehIj9B0NUZlj0lM0rZOeadOmYcKECbh48SKWLVuGZcuW4eLFi/jss88wdepUnQbXrl073L9/X23fgwcPVM1tNWrUgJubG44dO6Y6npiYiIsXL8Lf31+nsRBRBWRhB7j5KLe5JAUZAUer7OYt9ukpiNZJz927d/Hxxx/n2z9s2DDcuXNHJ0FlmzBhAi5cuICFCxfi0aNH2Lp1K9avX4/Ro0cDUI4W++yzz/DVV19h3759uHnzJgYNGgR3d3f07t1bp7EQUQXl1V75k4uPkhFwslZ23bj2LB7RiWl6jsbwaJ30uLi44Pr16/n2X79+HVWqVNFFTCotW7bE7t278dtvv6Fx48aYP38+VqxYgeDgYFWZKVOmYOzYsRgxYgRatmyJ5ORkHDp0iHP0EJGSd/bio+zXQ5XfG3Wc4e1kiajEdPx0JlTf4RgcrTsyDx8+HCNGjMCTJ09Uq62fPXsWS5YsURsRpSs9e/ZEz549Cz0uEokwb948zJs3T+f3JqJKwPN1U3fMfSD5JWDtot94iMqQjYUZhrargdn7biM8LlXf4RgcrZOeWbNmwcbGBt9++y2mT58OAHB3d8ecOXMwbtw4nQdIRFQqlo6Amy8QeQO48hPQYZq+IyIqU1VslE1c0UnpxZQ0Plo3b4lEIkyYMAHPnz+HTCaDTCbD8+fPMX78eIhEorKIkYiodN54XQt99rvSj+J6cpIjwcigubxOel4y6clH66QnNxsbG9jY2OgqFiKistGwN1C9FZCRChz/quTXubkT2Pw2sG+MzkKjPBRZQDFLHlHRqtgo+7RGJ6UVu3yUsdGoeatZs2Y4duwYHBwc4OfnV2SNTkhIiM6CIyLSCZEICFoA/NQFuLYFaD0KcGus3TUUWcDJJcrt/67rPEQCoFAA6wOU2yNOAiZi/cZTQWXX9KRlKJCUnglbCzM9R2Q4NEp63nnnHdUMxu+88w6bsYio4vFoBTTqA9zeDRyeCQzcrUyGNHVnDxDzQLmdGgOkyZTzAJHupCUAkTeV2xH/AtWa6TWcikpqLoaNxBRJ6Zl4mZTOpCcXjZKe2bNnq7bnzJlTVrEQEZWtwDnAvQPAk+PAo6NAnS6anadQACe/Vt8XFwq4N9V1hMYtM9e8Mi+uMukpBRdbCZJeZiI6MR21XKz1HY7B0LpPzyeffIITJ06UQShERGXMwRtoPVK5fXgmkJWp2Xn3/gRe3gUkdkCV10vjxHMOFJ2T5xpi/eKq/uKoBFyss0dwcYLC3LROel6+fIm33noLHh4e+Pzzz/Hvv/+WRVxERGXjjcmA1AF4eQ+4trn48goFcHKpcrvNp0DVJsrtuCdlF6OxysiV9KTJ9BdHJeDpaAkAePIyRc+RGBatk569e/ciIiICs2bNwuXLl9GsWTM0atQICxcuxNOnT8sgRCIiHZLaAwGv5+o5vhBITyq6/P2/gKhbgLkN0PpTwKGGcj+THt3LeJWzncnh1qVR11U5svphdDHfbyNToiHrDg4OGDFiBE6cOIGwsDAMGTIEv/
[... base64-encoded "image/png" output data omitted: this hunk replaces an embedded Matplotlib PNG plot output with a regenerated one ...]
Tgbbc5gd+68AjdXzQ1zxRyZFzp1HmvRaAnDX5t1Hm3RuaBc5r1zFKocEvSZecZFkiQGR3m1NetRFy/DcvwIlr27iV1w6aD7Ja/HEEkMrzkaii+s8m5LHYsOv0wB0Ft+Ch0eCWsggNUiY1UkbIqMRem/nF7d1Mho+tgHy+pC8IvnjtLsCVOSZ+PfL16GZRp0psVx2y0UunLXqGGSHTIWRzfddBOf+MQnOH78OGeffTZg1Bx973vf49Zbb836Ak3SIBRF8vWN7JiuHWbjxVVKSMR40P8SO0JvI4B8ycm/5l/A2faThn2gK7vfxfrQ0wDEtpyDdn7qwZ5jwulAPXsD1udex/Lsa0SzII7ybHlU5WVvXpi3L2oUrzWadCSJyFXXoPz3d40v/jd3op5+dtoPV9IRR3l56AuXGAKiq6O/5T2L6IVFkDe4y1E4jQ4xmyxxSYWRUtvVI3ikWeNoQPB0u872dp0zS2Q+UGhllQXitUcSQ1LgkoR20slYjh9BqTvB0L942dM7KHKULK02aPuon7yOtwHorDiXzoAKpG7MsCoSVkXu++m7nKGQaugOjtnT6C+7Gtld34tVkbj1kuUUuWyjP2gKUV00PQrGZzsZi6OvfvWr5Ofn86Mf/YjbbrsNgOrqar7xjW/w+c9/PusLNEnCTOwwGw82N7v1Jv7H8ww9uhG1ONexmmvcm8mXh7ctyweOYX3gMSQB6jmnoG5Nbqo3XtRNp2F5cRfK8Qbk443oi2vGvC+nxUl1XnVWutMAoo2NhPfuBUkif+vE1ZiMhigtJ7rlCuyPP4z9iUfQVqxCFKbRNSgEcsvo4giH00jjFRYhCovQwyHkrg6knh4kffyNCUKW0ZPMdIuLoziKJHFGicTpxRLv+gR/a9F4u1ews1tnZ3cpa51OvuZwUAkghosIbb7RTabUnQBdH9SdKYWCicgRDmfKtBoAuoa7ay+KGkS15uOpOn3U5xjTBDFNA0Z/vZIJKUWWaPWOzdPo9eNdPLLHsGu46bzFLCmfXs7wDqtMSd70EnOzlYzFkSRJ3HLLLdxyyy34fMZZV37+9OkQmLbEO8y8QWNkx0zqMBsHPSLK/1NfZVfwKAAVShHX529htS25N5dc24jt/oeRdB11wypiV23Jaj3QIArz0U5bg+W1f2J5difRxVePaTc2xc7c/Jqs1BnF8f3jHwA4N27EWjm1ClljZ2/Csnc3SkMd9kf+TPhjnxj1/0jq7UGKhA1xUp76+QwVKTic6HPnQ1U1Uk83cmcHUnTs6WhRWg5JardwOBCShDRkyoAkSawukFhdIFMX1HmsReflTo29IRf/iBZxqhXao6DqYlCHm15dg7BakcIhpI42ROWQiGIoHjkaOa0mayEKOnYD4CvfgGbLrit2JkJqNOq7g9z9wjEA3nPyHM5blllN2lSgusiZlbSkSe4ZV6I2Pz/fFEa5RNORegPIDZ3Ih5tQGjqQPQFTGGHUHTyrH+c27Wl2RY8iI3G56wy+U3J9SmEktXRg+/VfkKIxtJWLiH34vSkHyWYL9YIzEBIo+48itaRuhU+FRbEwL78GJYvCSO3uJrBzJwAF73lP1vabNWSZyAc+jFAULAf3Y9m7e/SHxFNqFVVgSX3OJxwpCmEVC6KsAm3FSWgLl6C7M/9cExYLekUKYSbLRhRnBBa4ZD67xMKvVnh4f1FvYpDw0aDM5/4Z4/FWjbDWJ64UBb3GeJ8rdcMnGCRqjpK08g9EiQXJ79gFgLf8VLQpOqPPF47xo6cPEVF11swt5MOnz5/sJWWMzSJRPtoIF5MpQ1qRo1NOOYUdO3ZQXFzMhg0bRlS+u3eP/kFmMgKzrcNsDDQKL/dpuzmK4Wmz2DKHGwq2MN+SuiFA6urF/ss/IgXD6AuqiV5/FVhy7+UiKkrR165E+edBLM++Ruwj7037sYqsUOOeh0XObvGmb/t20DTsy5djn6LGrXrlHKIXbMH+zD+wP/Yg6pLlMIJgSa8YW04e1RmIJCEKChEFhejBIEpLE1IgvbokvaJqxBSWcLr6010jUGaDm8o7cdl7wAN2qb/D7cEmjUsrZS6rVLAtWIxy4ihK/YlhtVnxmiPdlTfijDlXz7vYQh3oshXvnHNyF0UdB5ou+NmzR2n3RajIt/P5C5dOy0GtVYVO5Gm47tlKWuLofe97H/a+D5X3ve99ZlgwR+ihMMrhpslexpQlKjT+rh/i7+IQGgIHVj7oPp+LnOuRRzIN9AWw/fKPSF4/elUZkZv+BewTl/ePXXgmyj8Poux+F/Wy8xElo89IlySJmvwa7Ep2zzT1UAj/c8Zw0fzLLhtl68kltukSLPv+idLajP3vDyUfmdFHvzgaoWC9r94obVwutCXLkDy9yC1NI6bbhM2OKCkbcXfDUnqptut7L9v7vkhPdYf5RKHCo80arRH4a5POo806Nzrm835AqT0+bB9SX1ptaGH4UIqaXgAgULKG6BQdYPyHN+rZ1+TBbpH54pYV5DumX6eXRZGozDejRtOJtMTR17/+9cTlb3zjG7lai4kZJUrJQdHB/drbtGAUXK9X5vGxovdQqozSZRUKY//Vn5A7e9BLConcfA3kTazHiJg/B235QpTDtVief4PYVaMZpUrMdc/FoWR/nf7nn0eEQljmzMG5fn3W959VLBYiV/0rzrt/bIzMWLcRbVVyu4V+cTS8GDqOcI7t9RSFRWj5BUjdnchtLUnT2vqcuSNGaDI6fsLnyPgtoyc63N7s63A7FhA8IBviSO7qoLbNy8LKvr8FXTfmrAFilPRgfrdh/OgvW4dqG120TzQvHengiXcM/6pPbVrC/JL0BOZUozLfMa3sBkzGUHP08Y9/nOeffz4HSzExGU5ARLlX280d2ku04KcQO5+1nM2/F189ujCKxrD99kHkpjaE22UMki2anBo59cIzAVBe2wP+kVMr1e455OWg9kOoKr6nDfuCgq1bp8UwWX3eAmLnGm7Z9kf+//buO77Jcv0f+OcZ2WmS7kE30NJCGTKkOBgyKshRcetRcCEeFJUlKqLIF3Eh8EOPHI8o7oniEVDZe4NFlCUKFIG2QOlus57n90cGCW3TzCZNr/fr1Vea5MmTO4W0V677uq/7S/sffSdGI9jzpZbjXU2ryX34w8qy1pqkzhBi4iwNLm3nVUVA1OqaP4dc4fS4JsdpPcZ2yYiWYIxjGPSNYjGnM48Xc3h0jFPhZIQl27N0+zHMPGREYbkAUV9vL/x2GRyJAiS1JQCAenUKBD60Ao8/z1Xjv5stWbGburfDlZn+6Qrf0lgGSNDS8v3WxuPfjufOnUNBQQFSUlIwZcoU7N+/PxDjIm2QKIooE+twSDiH9cJxfG7+FdPMq7FRPAEAGMhkYA43BL3V3cE0t/eTWYD0o2Xg/jxl2Uj2kTsgxkYF/kU0QchKh5CcAMZoAr95T5PHxaviESENTM+h2p07YS4rA6vVQtXP/f5BwWYYPBxCdCzYygrIfvy+wf3suWIwgmDp6aNpOvvhbebICc9DSEqGOSsXgkYHoGHDxya5UZRtPdDp8vI+R5YVbiyezZZA08Gy
pL9z2Qn8Vili9hET5uy31EiJPO/yNbPGOkjrLIsE6jQZIVVvVF5rwJurj8JoFnFFqg639fK+DUawxUTIIOVD/4MIcebxUv7vv/8eFy9exNdff43PPvsMb775Jjp16oR77rkHd999N9LT0wMwTBJOqkUDSlCNYrEaxWIViq3fl6Aa+kaW/CYhAvdzPZDFxAASFSBVuX4CQYTky5Xgfj9m2Uj2oVshJicE6NW4iWFgvC4fsg+/A79lryWTdFndU4wiBjqZf5dS24iiiErr8v2IIUPASFtRrxWpFPpRd0Lx34WQ7NoGU9crYG6fZb/bqb+Rqz/wMj9+epfJIKRnQqivB+Tun9etomx75qjxDtmOFO3bA7/swA31J1GcwGJNqYCKMsv5K3gFfj6lR/925kZ3f5fWngYrGCCCQb02dArzTWYB89f8gbIaA5K0cowf2AFsCAVunkrS0lYhrZFXO99FRkZi7NixGDt2LP7++298/vnneP/99zFjxgyYTG28ISEBAOhFE0pQgxKxGsWosgZC1ShGNarR9NYNLBjEQoUERoV4RCCV0aIvkwwJwwFgAVUzqXVRhOR/a8HvPgCRZWC47yYI7UNj2a/QNQtCbCTYcxfB7SiEuf+lhnuR8khEK1wX9Pqi/rffYDx1CoxMBvXAgQF7nkAxZ3aEsc9VkOzaCtm3X1h2pLcGeGyJpSbF7GpKTSZ3uaTdax4ERoCLVgKOGOfMES7vkO3A1gxSdvYUxrQTcWs7CQr3Wt5flbwCSw5X4pvjv2BobjyGdk6AVnGpmFlReQIAYJJFwigLnSmrD7efxJGSKigkHCYNzYZS2no3aI1WS6GQBn5VLPE/n/7XGY1G7NmzBzt37sSJEycQH2LN5EhgmUQB51HbIPtTLFajDI3UhjiIggLxjBoJUCPB4TIGKvBNrTxTRALNLGvn124Hv3E3AMB45wgIXTq6PL5FsSxMg/pC+uWP4DfsgvmqngDPQSPVIFYZ2H0JbU0fVddeC04dmr1smqO//h/gjvwOtuw8pGtWwjD8JgAAe9aywlNIdBEc+WNKzQ9EZfN1PeJlBdmuMkdiTCxElQpMTQ3YM39DnZqB/mpL92m1WoEEFY/iGhO+/eU0fvj1DAZkx2FEXiLiNXLIq04CAAzKeAiS0Kg3Wnu4BGsOlYAB8NigDkhq5Ru0tvbxt2VeBUfr16/HZ599hqVLl0IQBIwaNQrLly/HoEGD/D0+EmSiKOIi6uxZnxLrZbFYjXOogdnFCjsVpJeCH4cAKB5qyBgP/+txUkDhejUNt+0XSFZYliYbbrwO5t55nj1HCzD36gLxx81gy6vA7fsdsqv6IkGd4LdtQRpjOHkS9b//DrAsNMOGNf+AUCVXQH/THVB8+B9ItqyHKa8HhJS0SyvV4l0VY4fIHylrUfblnbIdibisINtF5ggMA3NqBvhDv4E7eRxCaoZ9Gb9Go8KbQ1Kwq5rHD/vP4M9zNVh90BJ8XJkRhen8KQCAXuViu5UWdKS4Ch9sPQEAuL1XCq5IDcwUc0vRKHioZa0369XWefwv165dO5SVlaGgoADvvvsuRo4cae+BRFqvalGPYtRYskCXBUIGF63/peCQALVzFoiJQAJUUDN+/H+higVcBBBs4WFIvvkJAGAc3A/mAc3vERUUPA9T/96Q/LAeknU7kTTkZnsn5ECx1Rope/cGH9v6tlxwZO7UGcbuvSAp3APZ0s9Qf/84sNVVEBkGwuVbaDhys8dQwNmKsl3VHdm7oVv/X7jIHAGAOS3THhwZr3Hsjq0Ay0twZUYU+qRH4dDZSvzw61kUnirHjr/KUMifRRceOC4mwmgWIAniUvML1XrMW3MUZkHElRlRuLF7aARsvmhHWaNWzePg6MUXX8Rtt90GnU4XgOGQQLLUAV2q/XGsA6pxUQfE2euAnLM/CYwaOigCXywp0wJ807Ud7JHjkH7yvWUj2fzuMA2/NrDj8ZGpXw/wa7aBKTkPfeGvUPbsGbjnunABtTt3AgjRrUK8oL9hFLg/DoMrOQvZV58AgKUBo4sPaSGTOYJlLK6KssXLC7JdZY4AmNMsdUfsyb8AUbR3x3bcV41hGOQmaZGbpMXJCzVYsf9vpJ6yrFT74oQSy07sRvtYNXISNeiUEIGOcREtVitjMAmYt+YoKuqMSIlSYlz/9kFpNMwwQLRKikSdAqIoorzWiIo6I6r1JrhI9DVKJeOgU7aiRQ+kAY+Do4cffjgQ4yB+YqkDqnEOgGBZFXYRrnfCjoLCHgBdygRFIAbKpuuAAo3hAEXTS/CZk2cgfX8pGLMAc7dOMN46LKSWJDeGV6kQcd11qF6+EpUrVkBxxRUB+2NQ9fPPgCBAlpsLabisJFWpYfjHrZB/vgT8X38AcN0ZW+QlgCR0uiqLCiVw8ULTB1xWkG3rc9QUoV0qRI4DW10F5uIF+6azkCshNlKEnhatwoSr4pD7wzmgDiiXJMBYL+JwcRUOF1vaALAMkB6tQqeECHRK0CA7IQIahf9/hqIoYvGWv/DnuRqoZTwmD8lqdGVdILEMEKeRI1Erd3ruCLkEKbCsnquoswRK5XVG6I2ug1WAao3CAU2ItkLCZXVAjoXQ51ADwUUdUASk9qyPJRCKQDyjRjxUntcB+R0LsJzlj4PtUqpusvMwU3Iesne/tGwkm5UOwz9HNtulONjs+6UNjUXNz6th+PNP6I8cgbxTJ78/l1BTg+qNlhosTYhvFeIpU14PmAr3gD/0G4Bmmj+GSDG2TfNF2c4F2c12zpdIICSlgDt1AtyJv5ym1Zpaoccaa6DQnwcAjPvHtSgQE63BUSUOn63CuWo9/jpfg7/O12Dlb8UALNNEnRIi0MmaXYrxwyaqP/1ejE1/nAfDAE9c1xFxmpZrlshzDBI0csRr5C77EPEci2i1DNHW11tnMKO8zoCKOiMq60wwC87/PjIJi2gVZY1au2D/NSRNEEUR1TBcCn4uC4TcqQNKcMj+2KbC1ExLvWk5S6DiGOgwnPV7zuE21noc5/CJuXlMWQVk71g3kk1NhOGBW1zuxh4KnPZL08qguuYaVK9bh8rlywMSHFWtXw+xvh6S5GTI80KvON0nDAP9jbeD++sYGH09zEkumgSG0JQagOaLsht0yG4+U2FOz7QER0XH7QXZluCo8feEtOYsWMEIEQyMESlIYiVI0ikwqJNl1eSFav2lYKm4Cn9frMPpcsvX2sOWbuQxaik6JWjs2aUkndyjDOhvpyvwyQ7Lirl/XpmGLu1aZvsSKc8iSSdHXITcqw1sFVIOCqkCiVoFBEFEVb3JmlUyoEZvRpJWQfuPhoHQ/mvShvxd9Te+++Nz/GbegxLr0vgaGJs8ngODOKis018RTvVAOnj2S6p5nPUTqEMgYw9oHAIdxyAnkNNw1bWWjWQrqiDER7f4RrLeabhfmub661G9fj3qDxyA4eRJSNPS/PZsotFo3yok4vrrw/KXtajVoe7+ceD+/APmTo3vuQa4v+Fri2FZS0PKxrZCsbLUG7lXcwRY6442A+zJ4/Zml6JC2WTmSGFdxm+UR0NspD1GtFq
GqzrIcFUHS++tqnojjlin3Q4XV+L4+RqcrzZgy7Hz2HLMkoGKkPNO03Dp0aomg4/SynosWPsHBBG4pkMMru8S+CatSimHRJ0cMSoZWC+CosawLAOtUgKtUoJUKGEwCeD9dG4SXB4HR5s2bUK/fv3AX/Yp3WQyYdu2bbj22tAuhg1VpbWlePfIxw1uj4bCuvrLuRA6Bkpw3gQgtmCmsUDHntVhnTM9AVxm7rF6vWUj2XNlECI10I+7E1CH2B+/RjS2XxofFwfllVeidscOVK5YgZh//ctvz1ezfTuEigpwkZFQ9e3rt/OGGiEtE0JapstjRA8bNbYEUaG0F043imHsBdnNrVYDAMHaDJItOWvfQkVUqpqcZpZVW5bxG5TuBSURcgl6pUehV7ql/q/eaMYfpdX2abg/SqtQVW/C7hMXsfvERQCAXMIiKy4C2QkRyEnUoH2sGlKeRb3RjLmrj6Jab0JmjAoPXZMZ0OA9Qs4jSadApFIS8A8JtE1I+PA4OBo4cCDOnj2LuDjnpnUVFRUYOHAgzI3sWE2al6HNwIjkwVCcOWsPhOJc1gExTUxZOU5lXZ7hYRFSgY6njCZIFy8F+3cxRJXCupFsYPYh8ydX+6VpRoxA7Y4dqN21C6ZbbwUf53szSFEQLm0VMnQomBCfbgwk0ZalCTHNFmXD+gEGAONiCt1+vggNhKgYsGXnwVSUW25TNt7skzHrIa2zTI25GxxdTi7hkNdOizzrVJjJLOCv8zU4fNYyDXekpAq1BjN+PV2BX09XAAB4lkH7WMuYispqoVFIMHFIVsACikiVZapQIw+dYnzSenj8W1MUxUaj7wsXLkClambPK9KkSHkkpveYjKPnP7os0Gkks2ObwmrNgY6nzAKkH38P7thJiDKpZSPZuNDZ8qApze2XJk1NhbxrV9T/+isqf/wRUaNH+/yc9b/+CtOZM2DkcqgHDPD5fK2aTB6SqxebK8oWGcahQ7Z768jNaRlgy85fOoc6otHjWFM9JNYNZ/3VAJLnWGTFRyArPgL/ACAIIk5drHUq8i6vM+JIiWU1HMcyeGpwR3uRs78wDBCjliFJJ2/V246Q4HP7f8+oUaMAWIpKx4wZ49T40Ww249dff0W/VrTTd0ji5IDGzR2+2xJRhOTrH8EdOAqR42B48FaIKS4a/oUInVzn1n5pmhEjUP/rr6jevBnam24Cp/WtMNWWNVIPHAjWje0qwlnI1RvZyOTNFGWzl6bVmqg5EjkOjEOm3pyWCckvuy/dH9F4cMSZaiGttwVHgfl9w7IM0qJVSItWYVjnBIiiiJJKPQ4XV+LPc9XolqxDpwT/Zn2jVFKkRStbvBUACU9uB0da6y9sURQREREBhcPyWKlUir59+1IPJBIQ/PIN4Hf+CpFhYLjvRggd/Ve47AuRYwGVAkxlTYP7IqQRiFO6t9egLDsb0vbtYfjzT1StWgXdbbd5PSb9X39Bf/gwwHGIGDrU6/N4i2Esq3nMggijWfC4eZ6/hVLzRycc57Io26kgu5GaI1EVASEiApx16xQAEKzNIC8d0/i0Gmuug6TOkmGqV7fMpswMwyBBK0eCVo4B2f7fRzBRK0datDIsFx6Q4HA7OPrggw8AAOnp6Zg8eTJNoZEWwa/dAcm6HQAA4x3XQ+iaHeQRWYgMAyEtCeB5cJcFR0qJEonqRLf3S2MYBpobbsD5BQtQtW4dNDfcANbL3jxVK1cCAFR9+4KParp5ZqBIOQ4ZMZf+KJsFASZBtHyZBZjMIky228wijIIIs1lo0CvGX0Ktx5Ej10XZjH0p/+WZI5HjYE5JBaPXO90uxCVAlMnB6OstQaGk8RWcrLHWPq1Wr2mZ4CiQ0mOUSNSG7r8zaZ08npR94YUXAjEOQpzpDeDX74Tk5y0AAOPIgTBf2S3Ig7pETI4HtJZpC1EmBaO3bL8i5+Vop27n8X5piu7dwSclwXTmDKrXr/dqqw9TaSlq9+wBAEQUFHj8+EDgWBYcCzRXWSKKIoyCNXgyCzAJcAiiHIIqs4dBVKhmjmAN3C42cZ+LzJGQlAJIZQ27X7MszKnp4P84bAmOmljGL6s5A1Y0QWRY6FUpvr6MoGEZoEOc2u91S4QAXgRHJSUlmDx5MtauXYvS0lKIl+XNabUa8Um9HvyWfeA37ARTY/lUbRzUF6ZBobMcXYiPhhhzqchajNSAKT4PKSdFu4hksIznNQ8My0IzYgTK/vtfVP70EyIGDwYj9ax3U+VPPwGiCHleHqSprSsjwDAMpByH5rbzEiHCbM04mawZp0tBlegUUAkSWZMBQihwWQ/lUHPk2OdI0EVCjLRmBDnenimyMadlWoKjJrpjM4IR0lpLx2ujLAqCJERrspoh4RhkJ0QgglaikQDxODgaM2YMioqK8PzzzyMxMbHF5nhnz56NFStWoLCwEFKpFOXl5Q2OKSoqwqOPPor169dDrVZj9OjRmDNnjlNPpg0bNmDixIn4/fffkZKSgunTp2PMmDEt8hqIC/V68Fv2gt+wyx4UCTGRMA29CuZeTTf4a2lCpAZiovPO9mKkBlxpOVIiUsD7sAWLqm9fVCxdCnNZGWq2bfNopZm5qgo1mzcDCL+tQhwxYMBzDHgOQDOFt6JOBzZFC6NJhMEswGAWYDQJMFq/N5gEGM1iwKb0muWqU7bjajVrcCRKpJaskQNRoXQOjnLzIK79EUJyaqP7qrGmOkitU2pGRVxgm7UGiFzCIidRQ4XXJKA8/k2+ZcsWbN68Gd27dw/AcJpmMBhw2223IT8/H4sXL25wv9lsxogRI5CQkIBt27bh7NmzuO+++yCRSPDyyy8DAI4fP44RI0Zg3Lhx+PTTT7F27Vo89NBDSExMxLBhw1r09RCrej34zXstmaJayy95ITYSpiFXwXxFZ4ALnV/eoloJMTWxwdJwXqlCWkI2+HrfsqYMzyOioADln32GypUrobr2WjBu7hVXvW4dRIMBkrQ0yHJzfRpHuJCpVZBIeaCZBJytePxSwCRcCqhs182WQMqvXBRlWwIjh397UYCQnNpgixxRqQLKy+zXhcR2qJn+smU6sZGtQ1hTHSS2lWpe9jgKpgg5j+yECEhC6PcCCU8eB0cpKSkNptJawsyZMwEAS5YsafT+VatW4eDBg1izZg3i4+PRvXt3zJo1C08//TRefPFFSKVSLFq0CBkZGZg7dy4AICcnB1u2bMG8efOaDI70ej30DoWPlZWV/n1hbVW9HvzmPZZMkT0oirJkinrkhlRQBACiXAoho12DjsMsw6KjriNkQjUMJ4t8fh51//6o/P57mEpKULdnD5R9+jT7GMFgQNXq1QAsWSNvsrmMRAJAhGg0efzYUMW42caAYxlwLNdsJkKwTuHZMk5Ga/BkMDsHVJ6s0hPlisaLshnWoSAbEKOjIUY0XPreaL8kpXWxTCOZI84hc2RQhn47DEfRaik6xKr9tvUHIa54HBzNnz8f06ZNw3/+8x+kp6cHYEje2b59O/Ly8hAff2n59LBhw/Doo4/i999/R48ePbB9+3YMHjzY6XHDhg3Dk08+2eR558yZYw
/MiB/U68Fv2gN+42VB0TBrUORmpqQliRIeQmZKg0/tLMOig64D1FI1xCipX4IjVi6HesgQVC5bhsoVK6Do3bvZYKdm61YIVVXgYmLcCqYaw8dEQ5KYCMPff8NUes6rc4Qa1s8ralmWgYzlIOObmc4TRXvwZMs4iXCIlsRLF4IQA5No3SjW4Q6Wl4PDpc7eSdlpgEzRIOgStTKYS+SAeOk5bMcwkSowarl9TAAgMZihMFiW8fNRqYiNkMJoFlFRZwx62wVXknRypEbRUn3ScjwOju644w7U1taiffv2UCqVkEicC+LKysqaeGRgFRcXOwVGAOzXi4uLXR5TWVmJuro6p95NNs888wwmTpxov15ZWYmUlNa7wiNo6uot02eOQVFcFExDr4a5R05IBkWApZeRkJncYGNbCStBVmQWlNaCVkYqBafVwFzhe2YxYvBgVK1cCcOJE9AfPAh5585Nj08QUGXbKmTYMDBeFiBzOh0YiQSyjAzwsXEwnDwBobph/6bWgpHwYD0saPfbczMMpDzj1rYYZiYK9WUlDe9g5YDp0iqs5Eg5IGs8E1aXEAWhpuG/lSIuomET0HoA9ZbgKCa5A2LiLCsu9SYzzlcbUFpZj3pj83u5tSRaqk+CwavMkb9MmzYNr776qstjDh06hE6dOvntOT0lk8mcuoETD9XVW6fPdoOpswVF0ZZMUffQDYoAh15GSudfzEpeiY6RHSHlnP/48tHRfgmOuIgIqPr3R/Xq1ahYvtxlcFS3bx9MJSVglEqovdz0meE5sA7dlDm1CvLcXJjPn4fh1KlWOdXWWjqDNzZOhmPBqNVgWd2lG4Wm/w1YlbLR4KjBnnqCGTDWATXWzGDkpWaqMp5DO50C7XQKVNQZca6qHheqDQhWrTpgWarfMT4CUargBLmkbfM4OBrth72fbCZNmtTsSrHMTNc7btskJCRg165dTreVlJTY77Nd2m5zPEaj0TSaNSI+qKu3Tp85BEXx0ZZMUfdOIR0U2Yjt4uy9jGw0Ug3a69qDZxu+dbjISDDsCYh++IuiKShA9bp10B88CP1ff0HWxPvAvsHsoEFgvdx9ntNqG0xXMAwDPjYWXGQkjH//DWNpKRDC0y6Xay3BEcNxkMTHAbwErFIBVqkEI5OBqTkPlDr8wIWmszmcWt34VOjlwZGxFqgtswRaDAtoG2/3oFVIoFVIkB4t4EKNAaWVelTrWzZApqX6JNjcCo4qKyuh0Wjs37tiO84dsbGxiI2Nbf5AN+Tn52P27NkoLS1FnHVn89WrV0Oj0SDXunonPz8fK60dhG1Wr16N/Px8v4yBwBIUbdxtCYrqLYXsQnw0TMOuhrlb6wiKAGsvo1jnDtMxihika9KbrHtgeB6cTgdTWROd/TzAx8RA1bcvarZuReWKFYh9/PEGx+iPHoXh2DGA5xExZIjXz8XpdE3ex/A8pOnp4GNjYTh5Euaqaq+fpyW1luAIAKSN1W7aNp4GA0BsJnPUsLaKYZmGKx1NeqDa+uFQFQtIXH8g5DkW8Ro54jVy1OhNKK3S43y13vNGnB6ipfokFLgVHEVGRuLs2bOIi4uDTqdr9I+DKIpgGCZgTSCLiopQVlaGoqIimM1mFBYWAgA6dOgAtVqNoUOHIjc3F/feey9ee+01FBcXY/r06Rg/frx9WmzcuHF46623MHXqVDzwwANYt24dvvrqK6xYsSIgY25TGg2KYizTZ60oKAIa72WUrE5Gorr51T1cTIxfgiMAiBg+HDVbt6Ju714Yz56FJNH5+W1ZI1W/fi4DHJcYuLXRLauyTLWZzp2D4dTfEI1G757P1VA4FqIg+CVD1ZqCo0bZfseynCUwEpr+eTMKheVnZ3bILjWyjB+CCagutXyvjgd498sFVDIeGTIeaVFKXKgx4FyVHhV1/v8/QEv1SahwKzhat24doqz7NK1fvz6gA2rKjBkz8OGHH9qv9+jRwz6eAQMGgOM4LF++HI8++ijy8/OhUqkwevRovPTSS/bHZGRkYMWKFXjqqaewYMECJCcn47333qMeR76orbMERZv2XAqKEmIs02fdOlkKB1oRUa1w6mXEgEGGNgPRimi3Hs9ptWB4DqLJ9w8J0uRkKHr0QN0vv6Dyxx8R/cAD9vuMxcWo++UXAL41feQiIqzL+N1jn2o7fRrGkhKfAxlWJgWn04HT6cBqNDCcPOnzajmGZcC09mlyW5d1W5NGc9OZI4ZhwKpUMFdWXbpN0lRwZM0cqeMAzvNaHpZlEBshQ2yEDPVGM85V6VFapYfB5HsRNy3VJ6HEreCof//+jX7fkpYsWdJkjyObtLS0BtNmlxswYAB+sf5RIT5oLChKjIVx6FUQura+oAiw9TJKtme5OIZDx8iOiJBGNPPISxiWBRcZBdM5/yyH14wYgbpffkHNli3Q3nSTfTPZqh9/BEQRiu7dIUlK8vr83mScGJ6HNC0NfEyM51NtjKVGxh4QXZbhkSQlWX52PgRdjCIMlnzbgiJbkORiWg1Aw+CosVWLggmosWaOIpIaNDP1lFzCISVKieRIBcprjSit0uNircGrlgC0VJ+EGq/3OqitrUVRUREMBoPT7V27dvV5UCSE1dSB37gL/Oa9lwVFV0Pomt0qgyKgYS8jKSdFVmQWFLznGQg+JtpvwZGsY0fIsrKgP3oUVatWIfLOO2GuqED11q0ALFNvvvB6Og4OU23nz8NQdKrJqTaG58BptZaASKt1maliZTJI4uJgLCn1flytfUoNsEynAZemo0XXmUhWrXa+odFpNfOlzJG2nY8DvIRhGESqpIhUSWEwCThfbckm1Rmaz54yDJAerUKC1rvFBIQEisfB0blz53D//ffjR2u9w+Vo49kwZQuKNu2x70AvJMbCOOxqCHmtNygCAJFlnHoZqSQqdNR1hITzbqUMGxEBRiqBaPBPTYbmhhtw7s03Ub1+PbQjR6JqzRrAaIQ0MxOyrCyvz8vKZWD9MP3Ex8SA0+lgPHMGxuJiSxNDhfxSdigiwqOMAG/NHnm76o9VhUFwZJ9Ws16aXf9furwom2msUaVgAqpswVGyryNslJRnkaRTIEmnQGW9EaWVepTVGBrdv46W6pNQ5nFw9OSTT6K8vBw7d+7EgAED8N1336GkpAT/93//Z9+Wg4SRmlrwG3aD3+wQFCXFWYKiLlmtOigCbL2M2tl7GelkOmRqM8Gx3q+UYRgGfHQ0jGeL/TJGedeukCQnw/j336hcuRLV1ro/zfDhPk1D+JI1uhzD85CmpoKPjQUYxuu2AgDASqXg4+JgLG6kOaI7jw+HzJF9Ws16KTSTOZLJwEh4e0+qBj2OAMtqNVuPI126nwbaNI1cAo1cApNZQFmNAaVVelTVW8Yn5RlkJ2iglnm/UTMhgeTx/8x169bh+++/R69evcCyLNLS0jBkyBBoNBrMmTMHI0aMCMQ4SUurrr00fRaGQZGN2C4O0FlqiuKV8UiJSPFL3YM/gyOGYaC54QZcWLQIlcuXW84fFwdFz54+ndefwZGNPzJRACBJTISptNSr7FFYBEfsZQXZLlar2R+iUsNcX
m650ti0WnWxZXqO4QCN93VqnuI5FnEaOeI0ctQaTDhfZUCcRkZL9UlI8zg4qqmpsfcRioyMxLlz55CVlYW8vDzs27fP7wMkLay6FvyGXeC3OARF7eItQVHnjmETFAGW7UtsvYxSI1IRr4pv5hHuY1UqsAo5BGsDTF8p+/RB+TffwHzesvVDREFBwz42HmA41qkrdqhhpFLw8fEeB5isXOb1FiohhWEsX6x7BdmAtSjbGhw1Oq1WcdpyqYoFpMEJIJVSHqnRlC0ioc/j/6XZ2dk4cuQI0tPT0a1bN/sGtIsWLUJiYvN9YEiIsgVFm/eAsdbK2IOiLh19XtkSakRdBMSkODBg0F7XHpHySL8/Bx8dDcPfp/1yLobjoLn+elz8+GOwERFQXX21T+fjtFqfgquWIElIsGSPzO4vEw+LrJENwzmsVmu+lpNTq2DLLzU6rVZ5xnLp5TJ+QtoSj4OjJ554AmfPngUAvPDCCygoKMCnn34KqVTa7FJ7EoKqa8Gv32nJFDkGRQXXQOjcIeyCIsDSy0hIS4KEk6JjZEeoJP7dvd2Gi44G/BQcAYB6wAAItbWQdewI1sf9/gIxpeZv9uzRmbNuPyasgiOWu7Razc3MkV1j2bMqW3AUT8ERIc3wODj65z//af++Z8+eOHnyJA4fPozU1FTExMT4dXAkgBoLipITrNNn4RkUAZd6GcklSmRFZUHGBW5TYVYuB6tW+W13e4bnof3HP/xyrtYQHAHW7FFJidvZIyacgiOGdbvPEQAwEglYmRSC3tAwcySKQJV1itLD7tiEtEUeBUdGoxGdOnXC8uXLkZOTAwBQKpW44oorAjI4EgBVNZagaOu+NhUUAYDIcxAyUxChjEQHXYdGN4/1Nz4mBgY/BUf+wqpVHnXFDiZGIgGfkADj6TNuHd/YPmOtFsO61SHbEatWQ9CXNay7ctw6JCIe8LJNBSFthUd/HSQSCerr/VNgSlpYY0FRijUoyg3voAiw9TJKQbQ2EenadLBMy9Tb8JGRMJw8GVI72vOtJGtkI4mPt2SPmtmShZHwYKVhNF3EsJcKskU3gyOVCrhQ1jBz5Lh1iCYwPY4ICScef3QeP348Xn31Vbz33nvgGyv6I6Glqgb8uh2WoMjaA0VITbR0tM5tH/ZBkY2Q1g6JcZlIjmjZPwyMVApOq4W5vKJFn9cVLtL/xeeBxEgk4OMTYDztun4rrOqNAEtg5EFBNuDQKfvy382meqDGstIRuhQ/DZCQ8OVxdLN7926sXbsWq1atQl5eHlSXpbG//fZbvw2O+KCy+lKmyDEoGnY1hJy2ExQBgJAcj/TUPMQqY4Py/Hx0dMgER6xM2iqDCElCPEwlxS6zR63xdbnEcA59jtzMHCmVYDi24UrE8lOWHkcsD0TQqmJCmuNxcKTT6XDLLbcEYizEHyqrIVm3A9y2X5yDooJrIHTKbFNBEQAw8bHomNUXWpk2aGPgIiPBcKxHS9IDNpZWNqVmw/A8JAkJLlsjhF9wxHrU5wiwtHxotH9VeZHlUhULSPzTqJOQcOZxcPTBBx8EYhzEVxXWoGi7Q1CUlmTJFLXBoAgAuKhIdOhyLVTS4BbpMhwHTqeD6UJZUMcBtN7gCIBlWX9JiX2LjMuFXXDEep45Aiw9rBqw9ThSxdAyfkLc4HFV6qBBg1Bua1HvoLKyEoMGDfLHmIgnKqoh+W415LPfAb9pNxijCUJaEvSP3AH9E/e1uSk0G5k2Ep2uGBL0wMiGi44O9hAsXbE1mmAPw2u27FGj97EMGD9tXRIyHFereRIcNfZvXF9uuZRG0DJ+QtzgceZow4YNMBgMDW6vr6/H5s2b/TIo4oaKKkjW7gC3o9A5U1RwDYTsjDYZENko1ZHo0HMIeIn3m5/6G6fTOW0MGgysRhPyXbGbw8fHw1hc3ODnyCiUftkTL6Q49TlyryAbaCKDVlduuZSpaRk/IW5wOzj69ddf7d8fPHgQxcWX9jwym8346aef0K5dO/+OjjRkC4q2/wLGWpxqTm8H07Cr23xQBABaVTTSew0CJwudwAiwbB7LRUbCVHouaGNozVNqNgzHQZKYCEPRKafbw25KDfC4Q7ZL9syRGghg41NCwoXbwVH37t3BMAwYhml0+kyhUGDhwoV+HRxxUF4Fybrt4LYXOgdFBddAyEpv80ERAMSq4pB0xdXgQnR6hY+JCWpw1Nr6GzWFj4uD8WwxROOlnepZVRgGR15mjhpVb10tKVXTtBohbnA7ODp+/DhEUURmZiZ27dqF2NhLy6KlUini4uLAhcNu2EEk5SSIU8bBKBhhEIwwmY0wll0At2abZfrMFhRlJFsyRRQUWTFIVCcgtksvcLY+LyGIVavt2zu0+HOrVGDCpEEiw3GQJCXCcLLIfltYZo68WMrfpPpKy6U84tIKOEJIk9wOjtLS0gAAghD85cjhimd5RMqjAACmsjJULl+O6o0bAZPlFyPfIRP8DUNh7pgOk2iESTDBYDYipNovtzCO5ZCkbgdd+07gQ7y5IcMw4KKjIXiwkaq/hMOUmiNL9ugsRGu3dzZEs4U+YTmHDtk+Zo70tuAotN8jhIQKjwuyP/zwQ8TExGDEiBEAgKlTp+Ldd99Fbm4uPv/8c3sQRbxjunABlStWOAVFsqwsaG++GbKcnAZFpyJEmAUTDIIRRsEIo9lguRRMMJgNMPv6iTOESTgJ2qmToW6X2uQqplDDR0d7tMu8v4RbcMSwLCRJSTCcOAlWLmu4XUY48HK1WqNswZFC59t5CGkjPP6N8vLLL+Odd94BAGzfvh1vvfUW5s+fj+XLl+Opp56iDtleMp49i/P//jfKv1t2KSjKzrYERZ06NbkShwEDnpWAZxtfgSJCgNFsglEwOE3XGQQDTIIJZl9rGYJEzsvRLiIZ8uhYSFJTgz0ct7FKJVilEkJtbYs9JyOVgFOHRksDf+JjY2E8cyY8p9QA5yaQbm482yR9leVSEeXbeQhpIzwOjk6dOoUOHToAAJYtW4Zbb70VY8eOxVVXXYUBAwb4e3xtRv2hQyj/+hsAgKxTJ2hvugnynByfz8uAhZSTQtpE4zdBNFuCJrMl82QSLIGTLaASxdCbslNLI5CkTgSnjoC0fftWt4Sbj46CoQWDo3DLGtlYskftAJOx+YNbI8eCbJ+n1aotl8rg99sipDXwODhSq9W4cOECUlNTsWrVKkycOBEAIJfLUVdX5/cBthXqgQOhvfUWyDp09EtQ5C6W4SDjOMi4xpe+mwSTdZrOEjBZMk62KbyW/6MUKY9CrDIWnFwOeVZWq+zbw8XEAKf+brnnC9PgCAD42BiI4fp7x6lDtg/BkVEPmKw/IwqOCHGLx8HRkCFD8NBDD6FHjx44evQohg8fDgD4/fffkZ6e7u/xtRkMwyDh2WdRd+C3YA/FCc/y4FkeCjQseBUhXgqUrDVOJmsGyiAY/V7vFKeMQ6Q8CoyEhyw7G4ykdTazY6VScJoImCurAv5cDMs03jE5TDAsC0YVflOGACxZI9YPS/lrL1z6XhXj25gIaSM8Do7efvttTJ8+HadOncLSpUsR
bd0WYe/evbjrrrv8PkASuhgwkLBSSNjGp+xECDCYrVN1ZgOMtiyUtWhcEN1b+cgwDJJU7aCWqsGwDGRZWWDlodXk0VNcVHSLBEesRguGWmy0Tv4qyLYFR7wCkIZpfRYhfuZxcKTT6fDWW281uH3mzJl+GRAJHwxYyDgZZJwMqkaSPGbRbJ2uM1oDJ4Ml++TQooBjeSRHtIOcs2SupO3bh3QvI3fxUZEwnDwR8C4M4TylFvZYzqEJpA/BUZ11w2MZdccmxF1erX8tLy/Hrl27UFpa6tT3iGEY3HvvvX4bHAlvHMOB4xT2wMeRrUUBGAY8Y/lvKk1LBR8VHqttGIkEnE4H88XygD4PF6kL6PlJADHspe1DfCnItgVHUhXQxMIMQogzj4OjH374Affccw+qq6uh0WicVgpRcET8xdaiwEaSEN9qehm5i4+ODmhwxCqVYMOkK3ab5LR9iC+Zo4uWS5nmUrBFCHHJ43fKpEmT8MADD6C6uhrl5eW4ePGi/ausrCwQYyRtHBepa1W9jNzFRUaC4QL3x4qyRq0cwwCs9fOrLwXZjsERIcQtHv9mPn36NCZMmABluDZeIyGFVasga4W9jNzBsCy4AE4TUr1RGPDHarW6csulXOvzcAhpKzwOjoYNG4Y9e/YEYiyEOOFjoiHPzg7r1VaBqqFiJBKw4brEvS2xZ458mFarL7dcUnBEiNs8rjkaMWIEpkyZgoMHDyIvLw+Sy3rN/OMf//Db4EjbxEh4SNPTw6b42hVWqwUjkUA0+rehJqfThmW2rc2xBUe+FGTXV1guaV81QtzmcXD08MMPAwBeeumlBvcxDAOzuXXu1UVCAxepgyw9HUwbKSRmGAZ8dBSMxSV+PS8XSbuvhwV/1BzZgiO5zufhENJWeBwcOS7dJ8RfGJ6DNC0NfEzb6+DLRUf7NTgK967YbYpfgqNKyyVljghxW6tZ1zl79mz069cPSqUSukYKTffv34+77roLKSkpUCgUyMnJwYIFC5yO2bBhAxiGafBVXFzcQq+CNIbTaSHPy2uTgREAcGo1WLn/mvOxGk1Y12m1KfZpNR9qjvS24IiyiYS4y6smkBs3bsQbb7yBQ4cOAQByc3MxZcoUXHPNNX4dnCODwYDbbrsN+fn5WLx4cYP79+7di7i4OHzyySdISUnBtm3bMHbsWHAch8cee8zp2CNHjkDj8Mk6Li4uYOMmTWM4FpKUVEji6efPRcdAOH3aP+eiVWrhg/ND5khv3aaGNp0lxG0eB0effPIJ7r//fowaNQoTJkwAAGzduhXXXXcdlixZgrvvvtvvgwQubU+yZMmSRu9/4IEHnK5nZmZi+/bt+PbbbxsER3FxcY1mn0jL4SLUkGZmtvo90vyFj46CkYIjcjl/TKvZgiPKHBHiNo+Do9mzZ+O1117DU089Zb9twoQJePPNNzFr1qyABUfeqKioQFQjK566d+8OvV6PLl264MUXX8RVV13V5Dn0ej30er39emVlZUDG2lYwLANJSgr4+HhaTeWAVSjAqlQQamp8O49SAVZG+2eFDVuXeG+n1QQzYKy1fK9sm9PWhHjD45qjv/76CyNHjmxw+z/+8Q8cP37cL4Pyh23btuHLL7/E2LFj7bclJiZi0aJFWLp0KZYuXYqUlBQMGDAA+/bta/I8c+bMgVartX+lpKS0xPDDEqtSQd6lCyQJCRQYNYKP8X3ag7JGYcY+reblQphah10LaFqNELd5HBylpKRg7dq1DW5fs2aNx4HDtGnTGi2Qdvw6fPiwp0PEb7/9hhtvvBEvvPAChg4dar89OzsbjzzyCHr27Il+/frh/fffR79+/TBv3rwmz/XMM8+goqLC/nXq1CmPx9PmMYA0uR3knXPBKhpuMkss+KgowMeYkYKjMONrE8ja85ZLXg5I6L1HiLs8nlabNGkSJkyYgMLCQvTr1w+ApeZoyZIlDVaHuXOuMWPGuDwmMzPTo3MePHgQ1113HcaOHYvp06c3e3yfPn2wZcuWJu+XyWSQ0TSF11ilArLMTOrW7AZGKgWn0cBc4d3ULSPhwarVfh4VCSr7tJqXNUc1FyyXUjVtOkuIBzwOjh599FEkJCRg7ty5+OqrrwAAOTk5+PLLL3HjjTd6dK7Y2FjExsZ6OoQm/f777xg0aBBGjx6N2bNnu/WYwsJCJCYm+m0MxIoBJImJkLRrB4Z+KbuNj472OjjitNQVO+z4urdanXVaTRbhn/EQ0kZ4tZT/5ptvxs033+zvsbhUVFSEsrIyFBUVwWw2o7CwEADQoUMHqNVq/Pbbbxg0aBCGDRuGiRMn2nsXcRxnD8Dmz5+PjIwMdO7cGfX19Xjvvfewbt06rFq1qkVfS7hj5TJIMzPBRdAvZE9xkZFg2BMQBdHzx9KUWvjxdbVa3UXLJQVHhHjE7Y/0Fy9exMKFCxtdrVVRUdHkff4yY8YM9OjRAy+88AKqq6vRo0cP9OjRw74J7jfffINz587hk08+QWJiov2rd+/e9nMYDAZMmjQJeXl56N+/P/bv3481a9bguuuuC9i42xpJfBzkXbpQYOQlhue9C3IYS+aIhBlf91azBUdy6phOiCfcDo7eeustbNq0yal5oo1Wq8XmzZuxcOFCvw7O0ZIlSyCKYoOvAQMGAABefPHFRu8/ceKE/RxTp07FsWPHUFdXhwsXLmD9+vUYOHBgwMbcljBSCeSdsiFNT6fuzD7ivOgUzmk0YHivEsEklPkrcySnwJkQT7gdHC1duhTjxo1r8v5HHnkE33zzjV8GRVoXPjYGirw8ylz4CafVguE9CzBpSi1M+Zo5qi+3XMrovUmIJ9z+qPnnn3+iY8eOTd7fsWNH/Pnnn34ZFGkdGIkE0ox08LQDvF8xLAsuMgqmc+fcfgwFR2HK54LsCsslbTpLiEfczhxxHIczZ840ef+ZM2fA0qqkNoOPjoIirwsFRgHiSUNIViGnbVjCla/TanprcCTX+WU4hLQVbkczPXr0wLJly5q8/7vvvkOPHj38MSYSwhgJD1mH9pB16ABGIgn2cMIWGxEBRurez5eyRmHM52k1W+aIPsQQ4gm3p9Uee+wx3HnnnUhOTsajjz4Kzlp0azab8e9//xvz5s3DZ599FrCBkuDjdDrIMtLBSKXBHkrYYxgGfHQ0jGeLmz2WgqMw5nPmyLqCWEnBESGecDs4uuWWWzB16lRMmDABzz33nL1z9V9//YXq6mpMmTIFt956a8AGSoKH4VhIUlMhiYsL9lDaFHeCI4bnwFLbhPBlzxx5ubdafZXlkjJHhHjEo7W/s2fPxo033ohPP/0Ux44dgyiK6N+/P+6++2706dMnUGMkQcRpIiDNzKSd3oOAVanAKuQQ6uqbPIa6Yoc5XwuyDdWWS6Xn7SEIacs8bozSp08fCoTaAIZlIElNBR8XR398g4iPjobh79NN3k9TamHOl5ojQQAMNZbvFVH+GxMhbQAtLyMNsGoV5F26QBIfT4FRkHHRLlatUVfs8OdLzVF9BQDrNjQqyhwR4glqqUsuYQBpcjL4xEQKikIEK5eDVasgVNc0uI+LiKAVg+G
O8yFzVHvBeg4ZIFX6b0yEtAEUHBEAAKtUQpaZAValCvZQyGX4mBgYGguOaEot/LHW4NebzJEtOJKp/TceQtoImlZr6xhA0i4J8s65FBiFKD4yEmgkkUfBURtgK8j2JXMko9WMhHjKp8zR+fPnsXPnTpjNZvTu3RuJiYn+GhdpAaxCDmlmJjg1fbIMZYxUCk6rhbm8wn4bK5eBVSiCOCrSImyZI2+W8teVWS4pOCLEY14HR0uXLsWDDz6IrKwsGI1GHDlyBG+//Tbuv/9+f46PBIgkIR6SlBQwtOVLq8BHRzsFR5Q1aiN8Wcpfd9FySZvOEuIxt4Oj6upqqB0yDDNnzsSuXbuQlZUFAFixYgUefvhhCo5CHCuTWrJFGk2wh0I8wEVGguFYiGZLBoGCozbCl5ojW3Akp/c6IZ5yO23Qs2dPfP/99/brPM+jtLTUfr2kpARS2lYipPFxsZDn5VFg1AoxHGcPiBiOpa7YbYVttRpES98iT9SVWy7llDkixFNuZ45+/vlnjB8/HkuWLMHbb7+NBQsW4I477oDZbIbJZALLsliyZEkAh0q8xUglkGVkULahleOio2G6UGbpik3ToW0D4/ArWjTDozU09eWWS7nOjwMipG1wOzhKT0/HihUr8Pnnn6N///6YMGECjh07hmPHjsFsNqNTp06Qy+WBHCvxAh8TDWlaGhieuja0dpxOB0bCU5DblnAOfawEs/P15tRbN51V6Pw6JELaAo8/ft51113YvXs39u/fjwEDBkAQBHTv3p0CoxDDSHjIOnaArH17CozCBMMw4KOiKDhqS2wF2QAgmDx7bL21gJ+2DiHEYx791Vy5ciUOHTqEbt264b333sPGjRtxzz334Prrr8dLL70EBS0tDglcpA6y9HQwVAMWdiRJSdQVuy1xyhx5GBzpbZmjSP+Nh5A2wu3M0aRJk3D//fdj9+7deOSRRzBr1iz0798f+/btg1wuR48ePfDjjz8GcqykGQzPQdY+E/KsLAqMwhT9u7YxrENw5GmvI1twpKTMESGecjs4WrJkCVauXIkvvvgCu3fvxscffwwAkEqlmDVrFr799lu8/PLLARsocY3TaSHPywMfQxtMEhI2fJlW01dZLmlajRCPuR0cqVQqHD9+HABw6tSpBjVGubm52Lx5s39HR5rFcCyk6emQZ2eDpawCIWGGARgvGkEKAqCvtnyvog9MhHjK7ZqjOXPm4L777sOECRNQW1uLDz/8MJDjIm7gItSQZmaCpWJ4QsITwwAMa1nG70nmSF8JQLR8T8ERIR5zOzi65557UFBQgL/++gsdO3aEjlbMBA3DMpAkJ4NPSADDNLIjKSEkPDCMZWpNMHq2+WyttTs2JwUktFCGEE95tFotOjoa0dHRgRoLcQOrUkGWmQFWqQz2UAghLcGbabW6C5ZL2nSWEK9QA5zWggEkSe0gaZdE2SJC2hJbN3RPptVqrMGRVO36OEJIoyg4agVYpQLSjExwalWwh0IIaWleZY7KLJe06SwhXqHgKMRJkhIhadeO9tIipK1ivMgc2YIjGQVHhHiDgqMQxcplkGZmgqPd1wlp22y9jjwpyK4rt1zSprOEeIWCoxAkiY+DJCUFDMc1fzAhJLzZM0eeBEfW1Wo0rUaIVyg4CiUcB3mnbHBabbBHQggJFbbMkSfTarZNZylzRIhXKDgKIaxMBshkwR4GISSUMNZf094ER0radJYQb1CVLyGEhDLbYgyzN5kjCo4I8QYFR4QQEsq8Wcqvr7RcKmnTWUK80Wqm1WbPno0VK1agsLAQUqkU5eXlDY5prDni559/jjvvvNN+fcOGDZg4cSJ+//13pKSkYPr06RgzZozfx2s2m2E0Gv1+3rZGIpGAo8J00pbZm0B68Puk3hocKSg4IsQbrSY4MhgMuO2225Cfn4/Fixc3edwHH3yAgoIC+3XHPeCOHz+OESNGYNy4cfj000+xdu1aPPTQQ0hMTMSwYcP8Mk5RFFFcXNxo8Ea8o9PpkED7yJG2ypuaI32V5VJF2z0R4o1WExzNnDkTALBkyRKXx9n+kDZm0aJFyMjIwNy5cwEAOTk52LJlC+bNm+e34MgWGMXFxUGpVNIfdB+Iooja2lqUlpYCABITE4M8IkKCgPVwWk0UAYM1OFJScESIN1pNcOSu8ePH46GHHkJmZibGjRuH+++/3x6gbN++HYMHD3Y6ftiwYXjyySebPJ9er4der7dfr6ysbPJYs9lsD4xog17/UCgsO4qXlpYiLi6OpthI28N4OK2mrwJEwfI9BUeEeCWsgqOXXnoJgwYNglKpxKpVq/Cvf/0L1dXVmDBhAgBLVic+Pt7pMfHx8aisrERdXZ39D7GjOXPm2LNWzbHVGCmVSh9fCXFk+3kajUYKjkjbw9qm1dzMHNWXWy45CSBp+DuNENK8oK5WmzZtGhiGcfl1+PBht8/3/PPP46qrrkKPHj3w9NNPY+rUqXj99dd9GuMzzzyDiooK+9epU6eafQxNpfkX/TxJm+bp3mq11u7YUnVgxkNIGxDUzNGkSZOaXSmWmZnp9fmvvPJKzJo1C3q9HjKZDAkJCSgpKXE6pqSkBBqNptGsEQDIZDLIqDEjISRYWA8LsmvPWy5ltC8jId4KanAUGxuL2NjYgJ2/sLAQkZGR9uAmPz8fK1eudDpm9erVyM/PD9gYCCHEJ55uH1JbZrmU0b5qhHir1dQcFRUVoaysDEVFRTCbzSgsLAQAdOjQAWq1Gj/88ANKSkrQt29fyOVyrF69Gi+//DImT55sP8e4cePw1ltvYerUqXjggQewbt06fPXVV1ixYkWQXhUhhDTD0yaQddbgSE57NBLirVYTHM2YMQMffvih/XqPHj0AAOvXr8eAAQMgkUjw9ttv46mnnoIoiujQoQPefPNNPPzww/bHZGRkYMWKFXjqqaewYMECJCcn47333vPbMn5CCPE7T5fy11lrjuSUOSLEW60mOFqyZInLHkcFBQVOzR+bMmDAAPzyyy9+HBkhhASQPXPk5lJ++75quoAMh5C2gPZWCzBRFFFrMAXlSxRFt8c5YMAAPPbYY3jssceg1WoRExOD559/3n6O9PR0zJo1C3fddRdUKhXatWuHt99+2+kcRUVFuPHGG6FWq6HRaHD77bc3KIAnhHjI08wRBUeE+KzVZI5aqzqjGbkzfg7Kcx98aRiUUvf/iT/88EM8+OCD2LVrF/bs2YOxY8ciNTXVPjX5+uuv49lnn8XMmTPx888/44knnkBWVhaGDBkCQRDsgdHGjRthMpkwfvx43HHHHdiwYUOAXiEhbYBttZroZkG2LThSRAZmPIS0ARQcEbuUlBTMmzcPDMMgOzsbBw4cwLx58+zB0VVXXYVp06YBALKysrB161bMmzcPQ4YMwdq1a3HgwAEcP34cKSkpAICPPvoInTt3xu7du9G7d++gvS5CWjXWwz5HFBwR4jMKjgJMIeFw8KXgFHwrJJ51k+7bt69Tw8X8/HzMnTsXZrPZft1Rfn4+5s+fDwA4dOgQUlJS7IERAOTm5kKn0+HQoUMUHBHiLcbTDt
kUHBHiKwqOAoxhGI+mtgghxImnfY701v0fVbSvGiHeooJsYrdz506n6zt27EDHjh3t+5nt2LGjwf05OTkAgJycHJw6dcppe5WDBw+ivLwcubm5AR45IWHM24JsBQVHhHiLgiNiV1RUhIkTJ+LIkSP4/PPPsXDhQjzxxBP2+7du3YrXXnsNR48exdtvv42vv/7afv/gwYORl5eHe+65B/v27cOuXbtw3333oX///ujVq1ewXhIhrZ8nmSNRBOopc0SIr2i+h9jdd999qKurQ58+fcBxHJ544gmMHTvWfv+kSZOwZ88ezJw5ExqNBm+++aa9gSbDMPj+++/x+OOP49prrwXLsigoKMDChQuD9XIICQ+e1BwZqgHRehzVHBHiNQqOiJ1EIsH8+fPxzjvvNHq/RqPBV1991eTjU1NT8f333wdqeIS0TZ5kjurKrY/hAYkyYEMiJNzRtBohhIQye58jNzJH9eWWS1kE4LDylBDiGQqOCCEklHlSkG0rxpZFBG48hLQBNK1GAKDZLtYnTpxokXEQQi7jybQaBUeE+AVljgghJJSxHhRkm/SWS04euPEQ0gZQcEQIIaHMk+DIll3iJIEbDyFtAAVHhBASyhjrtJo7G8+ajZZLCo4I8QkFR4QQEso4TzJH1uCIpXJSQnxBwREhhIQyxoPVavZpNWngxkNIG0DBESGEhDJP+hyZKTgixB8oOCKEkFBmL8h2o+bINq3GU3BEiC8oOCKEkFDmSXBkK8hmqSCbEF9QcEQIIaHMHhwJzR9rq0viZYEbDyFtAAVHgSaKgKEmOF+i6PYwv/nmG+Tl5UGhUCA6OhqDBw9GTU0Ndu/ejSFDhiAmJgZarRb9+/fHvn37nB5bXl6ORx55BPHx8ZDL5ejSpQuWL1/u758kIW2TJx2yBVrKT4g/0HrPQDPWAi8nBee5nz0DSFXNHnb27FncddddeO2113DzzTejqqoKmzdvhiiKqKqqwujRo7Fw4UKIooi5c+di+PDh+OOPPxAREQFBEHD99dejqqoKn3zyCdq3b4+DBw+C47gWeIGEtAG24MitgmyaViPEHyg4Ijh79ixMJhNGjRqFtLQ0AEBeXh4AYNCgQU7Hvvvuu9DpdNi4cSNuuOEGrFmzBrt27cKhQ4eQlZUFAMjMzGzZF0BIOPOoQ7Ytc0S/2gnxBb2DAk2itGRwgvXcbujWrRuuu+465OXlYdiwYRg6dChuvfVWREZGoqSkBNOnT8eGDRtQWloKs9mM2tpaFBUVAQAKCwuRnJxsD4wIIX7m0Wo1s/NjCCFeoXdQoDGMW1NbwcRxHFavXo1t27Zh1apVWLhwIZ577jns3LkTjz76KC5cuIAFCxYgLS0NMpkM+fn5MBgMAACFQhHk0RMS5ux9jtwoyKZpNUL8ggqyCQCAYRhcddVVmDlzJn755RdIpVJ899132Lp1KyZMmIDhw4ejc+fOkMlkOH/+vP1xXbt2xd9//42jR48GcfSEhDEqyCakxVHmiGDnzp1Yu3Ythg4diri4OOzcuRPnzp1DTk4OOnbsiI8//hi9evVCZWUlpkyZ4pQt6t+/P6699lrccsstePPNN9GhQwccPnwYDMOgoKAgiK+KkDBhywK5lTmyBlA0rUaITyhzRKDRaLBp0yYMHz4cWVlZmD59OubOnYvrr78eixcvxsWLF3HFFVfg3nvvxYQJExAXF+f0+KVLl6J379646667kJubi6lTp8JsdqN4lBDSPG8yRxQcEeITegcR5OTk4Keffmr0vh49emD37t1Ot916661O16OiovD+++8HbHyEtGm2zJFHG8/StBohvqDMESGEhDKvCrLpcy8hvqDgiBBCQplH02qUOSLEHyg4IoSQUOZRQTYt5SfEHyg4IoSQUMZ50gSSVqsR4g8UHBFCSCjzJHNkn1aj4IgQX7Sa4Gj27Nno168flEoldDpdg/uXLFkChmEa/SotLQUAbNiwodH7i4uLW/jVEEKImzzZW42m1Qjxi1bz8cJgMOC2225Dfn4+Fi9e3OD+O+64o0HTwTFjxqC+vr5BX54jR45Ao9HYr19+PyGEhAzOm41nKTgixBetJjiaOXMmAEuGqDEKhcKpc/O5c+ewbt26RgOpuLi4RrNPhBAScuzTah70OaKaI0J80mqm1Tz10UcfQalUNmhYCADdu3dHYmIihgwZgq1bt7o8j16vR2VlpdNXW2WbliwvLw/2UAhpO+xL+T2ZVqPgiBBfhG1wtHjxYtx9991O2aTExEQsWrQIS5cuxdKlS5GSkoIBAwZg3759TZ5nzpw50Gq19q+UlJSWGH6LGzBgAJ588kmPHnPkyBEMHDgQ8fHxkMvlyMzMxPTp02E0GgMzSELaIs6TgmyaViPEH4L68WLatGl49dVXXR5z6NAhdOrUyaPzbt++HYcOHcLHH3/sdHt2djays7Pt1/v164c///wT8+bNa3CszTPPPIOJEyfar1dWVoZtgOQpiUSC++67D1dccQV0Oh3279+Phx9+GIIg4OWXXw728AgJD/YO2WZAFAGGafpY+8azFBwR4ougBkeTJk3CmDFjXB6TmZnp8Xnfe+89dO/eHT179mz22D59+mDLli1N3i+TySCTyTweQ2syZswYbNy4ERs3bsSCBQsAAMePH8fBgwfx5JNP4tSpU+jbty9Gjx7t9LjMzEynf5+0tDRs2LABmzdvbtHxExLWGIcEv2B2vUyfao4I8YugvoNiY2MRGxvr13NWV1fjq6++wpw5c9w6vrCwEImJiX4dgyNRFFFnqgvY+V1R8Aowrj5lWi1YsABHjx5Fly5d8NJLLwGw1FqNGjUK48ePx9ixY7Fnzx5MmjTJ5XmOHTuGn376CaNGjfLL+AkhcA50RDNc/tq2T6tRcESIL1rNO6ioqAhlZWUoKiqC2WxGYWEhAKBDhw5Qq9X247788kuYTCb885//bHCO+fPnIyMjA507d0Z9fT3ee+89rFu3DqtWrQrYuOtMdbjysysDdn5Xdt69E0qJstnjtFotpFIplEolEhISAADPPvss2rdvj7lz5wKwTEkeOHCg0WnQfv36Yd++fdDr9Rg7dqw9wCKE+IGtIBuwZoZcZLKpzxEhftFqgqMZM2bgww8/tF/v0aMHAGD9+vUYMGCA/fbFixdj1KhRjS7VNxgMmDRpEk6fPg2lUomuXbtizZo1GDhwYKCH3+ocOnQIV17pHNTl5+c3euyXX36Jqqoq7N+/H1OmTMEbb7yBqVOntsQwCQl/jpmj5las0cazhPhFqwmOlixZ0mSPI0fbtm1r8r6pU6e2+B9tBa/Azrt3tuhzOj53S7AVqOfm5sJsNmPs2LGYNGkSOI5r5pGEkGYxl2eOXKCl/IT4Bb2DAoxhGLemtoJNKpXCbL70qTQnJwf/+9//nI7ZsWNHs+cRBAFGoxGCIFBwRIg/OE2rNZc5st5PwREhPqF3EAEApKenY+fOnThx4gTUajXGjRuHuXPnYsqUKXjooYewd+/eBpm7Tz/9FBKJBHl5eZDJZNizZw+eeeYZ3HHHHZBIKK1PiF8wjGXFmig03yWb+
hwR4hdh2wSSeGby5MngOA65ubmIjY2FIAhYunQpli1bhm7dumHRokUNehfxPI9XX30Vffr0QdeuXTFz5kw89thjeO+994L0KggJU7apNVfTaoJwqVEkFWQT4hPKHBEAQFZWFrZv3+50W3p6Om644Qan2+6//37793fccQfuuOOOFhkfIW0ay1myQq6m1QSHzvS0lJ8Qn1DmiBBCQp2tEaTLzJHDfVRzRIhPKDgihJBQZyvKdrW/mtkhc0TTaoT4hIIjQggJdW7VHDncRwXZhPiEgiNCCAl17kyr2TJHDOd6c1pCSLMoOCKEkFBnm1ZzWZBN3bEJ8RcKjgghJNQx7gRHtu7Y1HyVEF9RcEQIIaHOXpDtIjgyWzNHVIxNiM8oOCKEkFDHulOQbeuOTcv4CfEVBUeEEBLq3FmtZt90ljJHhPiKgiPSpAEDBuDJJ59s8v709HTMnz+/xcZDSJvlVkE2bTpLiL9QcEQIIaHOk4JsWq1GiM8oOCKEkFDnVkG2bVqNMkeE+IqCIwIAqKmpwX333Qe1Wo3ExETMnTvX6f7S0lKMHDkSCoUCGRkZ+PTTTxucg2EYvPPOO7j++uuhUCiQmZmJb775xumYbdu2oXv37pDL5ejVqxeWLVsGhmFQWFgYyJdHSOvmUUE2ZY4I8RV9xAgwURQh1tUF5bkZhQKMm51yp0yZgo0bN+L7779HXFwcnn32Wezbtw/du3cHAIwZMwZnzpzB+vXrIZFIMGHCBJSWljY4z/PPP49XXnkFCxYswMcff4w777wTBw4cQE5ODiorKzFy5EgMHz4cn332GU6ePOmypokQYuXW9iFUc0SIv9C7KMDEujocuaJnUJ47e99eMEpls8dVV1dj8eLF+OSTT3DdddcBAD788EMkJycDAI4ePYoff/wRu3btQu/evQEAixcvRk5OToNz3XbbbXjooYcAALNmzcLq1auxcOFC/Pvf/8Znn30GhmHw3//+F3K5HLm5uTh9+jQefvhhf71kQsKTOwXZtFqNEL+haTWCP//8EwaDAVdeeaX9tqioKGRnZwMADh06BJ7n0bPnpSCvU6dO0Ol0Dc6Vn5/f4PqhQ4cAAEeOHEHXrl0hl8vt9/fp08efL4WQ8EQF2YS0KMocBRijUCB7396gPTchJAxQQTYhLYoyRwHGMAxYpTIoX+7WG7Vv3x4SiQQ7d+6033bx4kUcPXoUgCVLZDKZsHfvpSDvyJEjKC8vb3CuHTt2NLhum37Lzs7GgQMHoNfr7ffv3r3b7Z8lIW2WLTiyBUCNsWWVKHNEiM8oOCJQq9V48MEHMWXKFKxbtw6//fYbxowZA5a1/PfIzs5GQUEBHnnkEezcuRN79+7FQw89BEUjmamvv/4a77//Po4ePYoXXngBu3btwmOPPQYAuPvuuyEIAsaOHYtDhw7h559/xhtvvAEAbgdyhLRJjDUbJApNHyNQ5ogQf6HgiAAAXn/9dVxzzTUYOXIkBg8ejKuvvtqpxuiDDz5AUlIS+vfvj1GjRmHs2LGIi4trcJ6ZM2fiiy++QNeuXfHRRx/h888/R25uLgBAo9Hghx9+QGFhIbp3747nnnsOM2bMAACnOiRCyGXcyRxRQTYhfkMfMQgAS/bo448/xscff2y/bcqUKfbvExISsHz5cqfH3HvvvQ3Ok5SUhFWrVjX5PP369cP+/fvt1z/99FNIJBKkpqb6MnxCwptbfY6s99HGs4T4jN5FpEV99NFHyMzMRLt27bB//348/fTTuP322xudoiOEWNFSfkJaFAVHpEUVFxdjxowZKC4uRmJiIm677TbMnj072MMiJLTZl/K7Ksi2Zo6o5ogQn9G7iPiNKIrNHjN16lRMnTq1BUZDSBixBTyulvLb+xzRr3VCfEUF2YQQEupswZHZRc2R7T6aViPEZxQcBYA7GRTiPvp5kjaPNp4lpEVRcORHEonll1JtbW2QRxJebD9P28+XkDbHre1DKHNEiL/Q5LQfcRwHnU5n361e6UGXatKQKIqora1FaWkpdDodOI4L9pAICQ53Mkf21Wr0PiHEVxQc+VlCQgIA2AMk4judTmf/uRLSJrlVkG3rc0SZI0J8RcGRnzEMg8TERMTFxcFodLHslrhFIpFQxogQW3DkVuaIgiNCfEXBUYBwHEd/1Akh/uFOcEQdsgnxm1ZRkH3ixAk8+OCDyMjIgEKhQPv27fHCCy/AYDA4Hffrr7/immuugVwuR0pKCl577bUG5/r666/RqVMnyOVy5OXlYeXKlS31MgghxDv24MidgmwKjgjxVasIjg4fPgxBEPCf//wHv//+O+bNm4dFixbh2WeftR9TWVmJoUOHIi0tDXv37sXrr7+OF198Ee+++679mG3btuGuu+7Cgw8+iF9++QU33XQTbrrpJvz222/BeFmEEOIejwqyaVqNEF+1io8YBQUFKCgosF/PzMzEkSNH8M477+CNN94AYNnA1GAw4P3334dUKkXnzp1RWFiIN998E2PHjgUALFiwAAUFBfYNVWfNmoXVq1fjrbfewqJFi1r+hRFCiDtswZFbHbIpOCLEV60iOGpMRUUFoqKi7Ne3b9+Oa6+9FlKp1H7bsGHD8Oqrr+LixYuIjIzE9u3bMXHiRKfzDBs2DMuWLWvyefR6PfR6vdPzApZMFSGEtIg6E6AXgapaoKnfPdV1lmNqjU0fQ0gbZvu77U5j4VYZHB07dgwLFy60Z40Ay4amGRkZTsfFx8fb74uMjERxcbH9NsdjiouLm3yuOXPmYObMmQ1uT0lJ8eUlEEKIFz6yfrnwyuMAHm+JwRDSKlVVVUGr1bo8JqjB0bRp0/Dqq6+6PObQoUPo1KmT/frp06dRUFCA2267DQ8//HCgh4hnnnnGKdskCALKyspQUFCAPXv2+P35evfujd27dwf1PN481tPHuHN8ZWUlUlJScOrUKWg0Go/GE8789X8kUFp6fIF6vlB4L3r7eE8e4+6x9H5siN6LLfec/jivKIro2bMnkpKSmj02qMHRpEmTMGbMGJfHZGZm2r8/c+YMBg4ciH79+jkVWgOW5oslJSVOt9mu2xoINnWMqwaDMpkMMpnM6TadTgee5wPyC4LjOL+c15fzePNYTx/jyfEajYZ+GTvw1/+RQGnp8QXq+ULhvejt4z15jKfnp/fjJfRebLnn9Nd5pVIpWLb5tWhBDY5iY2MRGxvr1rGnT5/GwIED0bNnT3zwwQcNXlx+fj6ee+45GI1G+x5cq1evRnZ2NiIjI+3HrF27Fk8++aT9catXr0Z+fr7HYx8/frzHj2nJ8/pyHm8e6+ljAvXzawtC/WfX0uML5/eit4/35DGh/v8plIX6zy4Y4wuX9yMjtoItz0+fPo0BAwYgLS0NH374oVNzRVvWp6KiAtnZ2Rg6dCiefvpp/Pbbb3jggQcwb948+2q1bdu2oX///njllVcwYsQIfPHFF3j55Zexb98+dOnSJSivjTStsrISWq0WFRUVIf3pjJC2gN6PpC1pFQXZq1evxrFjx3Ds2DEkJyc73WeL7bRaLVatWoXx48ejZ8+eiImJwYwZM+yB
EQD069cPn332GaZPn45nn30WHTt2xLJlyygwClEymQwvvPBCg2lNQkjLo/cjaUtaReaIEEIIIaSltIoO2YQQQgghLYWCI0IIIYQQBxQcEUIIIYQ4oOCIEEIIIcQBBUeEEEIIIQ4oOCKtUnl5OXr16oXu3bujS5cu+O9//xvsIRHSptXW1iItLQ2TJ08O9lAI8Vmr6HNEyOUiIiKwadMmKJVK1NTUoEuXLhg1ahSio6ODPTRC2qTZs2ejb9++wR4GIX5BmSPSKnEcB6VSCQDQ6/UQRRHUsouQ4Pjjjz9w+PBhXH/99cEeCiF+QcERCYpNmzZh5MiRSEpKAsMwWLZsWYNj3n77baSnp0Mul+PKK6/Erl27nO4vLy9Ht27dkJycjClTpiAmJqaFRk9I+PDHe3Hy5MmYM2dOC42YkMCj4IgERU1NDbp164a333670fu//PJLTJw4ES+88AL27duHbt26YdiwYSgtLbUfo9PpsH//fhw/fhyfffYZSkpKWmr4hIQNX9+L33//PbKyspCVldWSwyYkoGj7EBJ0DMPgu+++w0033WS/7corr0Tv3r3x1ltvAQAEQUBKSgoef/xxTJs2rcE5/vWvf2HQoEG49dZbW2rYhIQdb96LzzzzDD755BNwHIfq6moYjUZMmjQJM2bMCNKrIMR3lDkiIcdgMGDv3r0YPHiw/TaWZTF48GBs374dAFBSUoKqqioAQEVFBTZt2oTs7OygjJeQcOXOe3HOnDk4deoUTpw4gTfeeAMPP/wwBUak1aPVaiTknD9/HmazGfHx8U63x8fH4/DhwwCAkydPYuzYsfZC7Mcffxx5eXnBGC4hYcud9yIh4YiCI9Iq9enTB4WFhcEeBiHEwZgxY4I9BEL8gqbVSMiJiYkBx3ENCqxLSkqQkJAQpFER0vbQe5G0VRQckZAjlUrRs2dPrF271n6bIAhYu3Yt8vPzgzgyQtoWei+Stoqm1UhQVFdX49ixY/brx48fR2FhIaKiopCamoqJEydi9OjR6NWrF/r06YP58+ejpqYG999/fxBHTUj4ofciIQ3RUn4SFBs2bMDAgQMb3D569GgsWbIEAPDWW2/h9ddfR3FxMbp3747/9//+H6688soWHikh4Y3ei4Q0RMERIYQQQogDqjkihBBCCHFAwREhhBBCiAMKjgghhBBCHFBwRAghhBDigIIjQgghhBAHFBwRQgghhDig4IgQQgghxAEFR4QQQgghDig4IoQQQghxQMERIaRVSE9Px/z5890+fsOGDWAYBuXl5QEbEyEkPFFwRAjxK4ZhXH69+OKLXp139+7dGDt2rNvH9+vXD2fPnoVWq/Xq+fyBAjRCWic+2AMghISXs2fP2r//8ssvMWPGDBw5csR+m1qttn8viiLMZjN4vvlfRbGxsR6NQyqVIiEhwaPHEEIIQJkjQoifJSQk2L+0Wi0YhrFfP3z4MCIiIvDjjz+iZ8+ekMlk2LJlC/7880/ceOONiI+Ph1qtRu/evbFmzRqn814+rcYwDN577z3cfPPNUCqV6NixI/73v//Z7788a7NkyRLodDr8/PPPyMnJgVqtRkFBgVMwZzKZMGHCBOh0OkRHR+Ppp5/G6NGjcdNNNzX5ek+ePImRI0ciMjISKpUKnTt3xsqVK3HixAn7bveRkZFgGAZjxowBAAiCgDlz5iAjIwMKhQLdunXDN99802DsK1asQNeuXSGXy9G3b1/89ttvzT4vIcR3FBwRQlrctGnT8Morr+DQoUPo2rUrqqurMXz4cKxduxa//PILCgoKMHLkSBQVFbk8z8yZM3H77bfj119/xfDhw3HPPfegrKysyeNra2vxxhtv4OOPP8amTZtQVFSEyZMn2+9/9dVX8emnn+KDDz7A1q1bUVlZiWXLlrkcw/jx46HX67Fp0yYcOHAAr776KtRqNVJSUrB06VIAwJEjR3D27FksWLAAADBnzhx89NFHWLRoEX7//Xc89dRT+Oc//4mNGzc6nXvKlCmYO3cudu/ejdjYWIwcORJGo9Hl8xJC/EAkhJAA+eCDD0StVmu/vn79ehGAuGzZsmYf27lzZ3HhwoX262lpaeK8efPs1wGI06dPt1+vrq4WAYg//vij03NdvHjRPhYA4rFjx+yPefvtt8X4+Hj79fj4ePH111+3XzeZTGJqaqp44403NjnOvLw88cUXX2z0vsvHIIqiWF9fLyqVSnHbtm1Oxz744IPiXXfd5fS4L774wn7/hQsXRIVCIX755ZfNPi8hxDdUc0QIaXG9evVyul5dXY0XX3wRK1aswNmzZ2EymVBXV9ds5qhr167271UqFTQaDUpLS5s8XqlUon379vbriYmJ9uMrKipQUlKCPn362O/nOA49e/aEIAhNnnPChAl49NFHsWrVKgwePBi33HKL07gud+zYMdTW1mLIkCFOtxsMBvTo0cPptvz8fPv3UVFRyM7OxqFDh7x6XkKI+2hajRDS4lQqldP1yZMn47vvvsPLL7+MzZs3o7CwEHl5eTAYDC7PI5FInK4zDOMykGnseFEUPRy9s4ceegh//fUX7r33Xhw4cAC9evXCwoULmzy+uroaALBixQoUFhbavw4ePOhUd+Tv5yWEuI+CI0JI0G3duhVjxozBzTffjLy8PCQkJODEiRMtOgatVov4+Hjs3r3bfpvZbMa+ffuafWxKSgrGjRuHb7/9FpMmTcJ///tfAJYVc7bz2OTm5kImk6GoqAgdOnRw+kpJSXE6744dO+zfX7x4EUePHkVOTk6zz0sI8Q1NqxFCgq5jx4749ttvMXLkSDAMg+eff95lBihQHn/8ccyZMwcdOnRAp06dsHDhQly8eBEMwzT5mCeffBLXX389srKycPHiRaxfv94ewKSlpYFhGCxfvhzDhw+HQqFAREQEJk+ejKeeegqCIODqq69GRUUFtm7dCo1Gg9GjR9vP/dJLLyE6Ohrx8fF47rnnEBMTY1855+p5CSG+ocwRISTo3nzzTURGRqJfv34YOXIkhg0bhiuuuKLFx/H000/jrrvuwn333Yf8/Hyo1WoMGzYMcrm8yceYzWaMHz8eOTk5KCgoQFZWFv79738DANq1a4eZM2di2rRpiI+Px2OPPQYAmDVrFp5//nnMmTPH/rgVK1YgIyPD6dyvvPIKnnjiCfTs2RPFxcX44YcfnLJRTT0vIcQ3jOjrhDshhIQpQRCQk5OD22+/HbNmzWqx592wYQMGDhyIixcvQqfTtdjzEkIsaFqNEEKsTp48iVWrVqF///7Q6/V46623cPz4cdx9993BHhohpAXRtBohhFixLIslS5agd+/euOqqq3DgwAGsWbOGankIaWNoWo0QQgghxAFljgghhBBCHFBwRAghhBDigIIjQgghhBAHFBwRQgghhDig4IgQQgghxAEFR4QQQgghDig4IoQQQghxQMERIYQQQoiD/w9fXK8WDJRZywAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -686,7 +729,7 @@ ], "source": [ "eval_data = {}\n", - "w = 5\n", + "w = 1\n", "fig = plt.figure()\n", "for method in data_paths.keys():\n", " print(method)\n", @@ -695,16 +738,17 @@ " print(seed)\n", " temp[seed, 0, :] = perf_data[method][seed][\"x\"]\n", " temp[seed, 1, :] = perf_data[method][seed][\"y\"]\n", - " # temp[seed, 2, :] = perf_data[method][seed][\"z\"]\n", + " temp[seed, 2, :] = perf_data[method][seed][\"z\"]\n", " temp[seed, 3, :] = perf_data[method][seed][\"c\"]\n", " eval_data.update({method: temp})\n", "\n", " # plotting performance\n", - " # plt.plot(temp[0,0,:], -moving_average(np.mean(temp[:,1,:], axis=0),w)[w-1:], label=method)\n", - " # plt.fill_between(temp[0,0,:], np.mean(temp[:,1,:], axis=0)+np.mean(temp[:,2,:], axis=0)**.5, np.mean(temp[:,1,:], axis=0)-np.mean(temp[:,2,:], axis=0)**0.5, alpha=0.25)\n", + " plt.plot(temp[0,0,:], np.mean(temp[:,1,:], axis=0), label=method)\n", + " plt.fill_between(temp[0,0,:], np.mean(temp[:,1,:], axis=0)+np.mean(temp[:,2,:], axis=0), \n", + " np.mean(temp[:,1,:], axis=0)-np.mean(temp[:,2,:], axis=0), alpha=0.25)\n", "\n", " # plotting constraint violations\n", - " plt.plot(temp[0,0,:], np.mean(temp[:,3,:], axis=0), label=method)\n", + " # plt.plot(temp[0,0,:], np.mean(temp[:,3,:], axis=0), label=method)\n", "\n", "# gp_05 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_0.5_cost.npy\", allow_pickle=True)\n", "# gp_10 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_1.0_cost.npy\", allow_pickle=True)\n", @@ -717,16 +761,16 @@ "# plt.plot(gp_30.item()[\"mean\"][:,0], gp_30.item()[\"mean\"][:,1], label=\"GP-MPC (m=3.0)\")\n", "# plt.fill_between(gp_30.item()[\"mean\"][:,0], gp_30.item()[\"mean\"][:,1]-gp_30.item()[\"std\"], gp_30.item()[\"mean\"][:,1]+gp_30.item()[\"std\"], alpha=0.25)\n", "\n", - "gp_05 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_0.5_constraint_percentage.npy\", allow_pickle=True)\n", - "gp_10 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_1.0_constraint_percentage.npy\", allow_pickle=True)\n", - "gp_30 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_3.0_constraint_percentage.npy\", allow_pickle=True)\n", - "plt.plot(gp_05.item()[\"mean\"][:,0], gp_05.item()[\"mean\"][:,1], label=\"GP-MPC (m=0.5)\")\n", - "plt.plot(gp_10.item()[\"mean\"][:,0], gp_10.item()[\"mean\"][:,1], label=\"GP-MPC (m=1.0)\")\n", - "plt.plot(gp_30.item()[\"mean\"][:,0], gp_30.item()[\"mean\"][:,1], label=\"GP-MPC (m=3.0)\")\n", + "# gp_05 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_0.5_constraint_percentage.npy\", allow_pickle=True)\n", + "# gp_10 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_1.0_constraint_percentage.npy\", allow_pickle=True)\n", + "# gp_30 = np.load(os.getcwd() + \"/gp_mpc_data/gp_mpc_M_3.0_constraint_percentage.npy\", allow_pickle=True)\n", + "# plt.plot(gp_05.item()[\"mean\"][:,0], gp_05.item()[\"mean\"][:,1], label=\"GP-MPC (m=0.5)\")\n", + "# plt.plot(gp_10.item()[\"mean\"][:,0], gp_10.item()[\"mean\"][:,1], label=\"GP-MPC (m=1.0)\")\n", + "# plt.plot(gp_30.item()[\"mean\"][:,0], gp_30.item()[\"mean\"][:,1], label=\"GP-MPC (m=3.0)\")\n", "\n", "\n", "plt.legend()\n", - "plt.ylim(0,100)\n", + "plt.ylim(-200,0)\n", "plt.xscale(\"log\")\n", "plt.xlabel(\"Training steps\")\n", "plt.ylabel(\"% Constraint violation\")\n", diff --git a/examples/rl/train_rl_model.sh b/examples/rl/train_rl_model.sh index 712b2db17..8c4d3155d 100755 --- a/examples/rl/train_rl_model.sh +++ b/examples/rl/train_rl_model.sh @@ -1,19 +1,19 @@ #!/bin/bash -#SYS='cartpole' -# SYS='quadrotor_2D' -SYS='quadrotor_2D_attitude' -# 
SYS='quadrotor_3D' +SYS='cartpole' +#SYS='quadrotor_2D' +#SYS='quadrotor_2D_attitude' +#SYS='quadrotor_3D' -# TASK='stab' -TASK='track' +TASK='stab' +#TASK='track' -ALGO='ppo' -# ALGO='sac' +#ALGO='ppo' +#ALGO='sac' #ALGO='td3' -# ALGO='ddpg' +ALGO='ddpg' -# ALGO='safe_explorer_ppo' +#ALGO='safe_explorer_ppo' if [ "$SYS" == 'cartpole' ]; then SYS_NAME=$SYS @@ -45,7 +45,7 @@ if [ "$ALGO" == 'safe_explorer_ppo' ]; then fi # Train the unsafe controller/agent. -for SEED in {0..0} +for SEED in {0..9} do python3 ../../safe_control_gym/experiments/train_rl_controller.py \ --algo ${ALGO} \ diff --git a/examples/rlmpc/config_overrides/cartpole/qlearning_mpc_cartpole.yaml b/examples/rlmpc/config_overrides/cartpole/qlearning_mpc_cartpole.yaml index 8b1378917..e69de29bb 100644 --- a/examples/rlmpc/config_overrides/cartpole/qlearning_mpc_cartpole.yaml +++ b/examples/rlmpc/config_overrides/cartpole/qlearning_mpc_cartpole.yaml @@ -1 +0,0 @@ - diff --git a/examples/rlmpc/rlmpc_experiment.py b/examples/rlmpc/rlmpc_experiment.py index d6b39932f..bf605003d 100644 --- a/examples/rlmpc/rlmpc_experiment.py +++ b/examples/rlmpc/rlmpc_experiment.py @@ -46,7 +46,6 @@ def run(plot=True, training=False, n_episodes=1, n_steps=None, curr_path='.'): results, uncert_metrics = experiment.run_evaluation(n_episodes=n_episodes, n_steps=n_steps) elapsed_time_uncert = results['timestamp'][0][-1] - results['timestamp'][0][0] - if __name__ == '__main__': run() diff --git a/examples/rlmpc/rlmpc_experiment.sh b/examples/rlmpc/rlmpc_experiment.sh index b63aac140..337e31cce 100755 --- a/examples/rlmpc/rlmpc_experiment.sh +++ b/examples/rlmpc/rlmpc_experiment.sh @@ -19,4 +19,4 @@ python3 ./rlmpc_experiment.py \ --task ${SYS_NAME} \ --algo ${ALGO} \ --overrides \ - ./config_overrides/${SYS}/${SYS}_${TASK}.yaml + ./config_overrides/${SYS}/${SYS}_${TASK}.yaml diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..431afb635 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,20 @@ +munch~=2.5.0 +pytest~=7.4.4 +matplotlib~=3.9.0 +numpy~=1.26.4 +pyyaml~=6.0.1 +pybullet~=3.2.6 +setuptools~=69.5.1 +casadi~=3.6.5 +gymnasium~=0.28.1 +torch~=1.13.1 +scipy~=1.13.1 +imageio~=2.34.1 +tensorboard~=2.16.2 +termcolor~=1.1.0 +gpytorch~=1.11 +scikit-learn~=1.5.0 +cvxpy~=1.5.1 +pytope~=0.0.4 +optuna~=3.6.1 +mysql-connector-python~=8.0.33 diff --git a/safe_control_gym/controllers/__init__.py b/safe_control_gym/controllers/__init__.py index e3b500681..2897d9453 100644 --- a/safe_control_gym/controllers/__init__.py +++ b/safe_control_gym/controllers/__init__.py @@ -34,6 +34,10 @@ entry_point='safe_control_gym.controllers.sac.sac:SAC', config_entry_point='safe_control_gym.controllers.sac:sac.yaml') +register(idx='td3', + entry_point='safe_control_gym.controllers.td3.td3:TD3', + config_entry_point='safe_control_gym.controllers.td3:td3.yaml') + register(idx='ddpg', entry_point='safe_control_gym.controllers.ddpg.ddpg:DDPG', config_entry_point='safe_control_gym.controllers.ddpg:ddpg.yaml') @@ -55,20 +59,17 @@ config_entry_point='safe_control_gym.controllers.mpc:sqp_mpc.yaml') register(idx='sqp_gp_mpc', - entry_point='safe_control_gym.controllers.mpc.sqp_gp_mpc:SQPGPMPC', - config_entry_point='safe_control_gym.controllers.mpc:sqp_gp_mpc.yaml') + entry_point='safe_control_gym.controllers.mpc.sqp_gp_mpc:SQPGPMPC', + config_entry_point='safe_control_gym.controllers.mpc:sqp_gp_mpc.yaml') register(idx='mpc_acados', - entry_point='safe_control_gym.controllers.mpc.mpc_acados:MPC_ACADOS', - 
config_entry_point='safe_control_gym.controllers.mpc:mpc_acados.yaml') + entry_point='safe_control_gym.controllers.mpc.mpc_acados:MPC_ACADOS', + config_entry_point='safe_control_gym.controllers.mpc:mpc_acados.yaml') register(idx='gpmpc_acados', - entry_point='safe_control_gym.controllers.mpc.gpmpc_acados:GPMPC_ACADOS', - config_entry_point='safe_control_gym.controllers.mpc:gpmpc_acados.yaml') + entry_point='safe_control_gym.controllers.mpc.gpmpc_acados:GPMPC_ACADOS', + config_entry_point='safe_control_gym.controllers.mpc:gpmpc_acados.yaml') + register(idx='qlearning_mpc', entry_point='safe_control_gym.controllers.mpc.qlearning_mpc:Qlearning_MPC', config_entry_point='safe_control_gym.controllers.mpc:qlearning_mpc.yaml') - -register(idx='td3', - entry_point='safe_control_gym.controllers.td3.td3:TD3', - config_entry_point='safe_control_gym.controllers.td3:td3.yaml') diff --git a/safe_control_gym/controllers/ddpg/ddpg.py b/safe_control_gym/controllers/ddpg/ddpg.py index 33e9605cf..eb5b89edc 100644 --- a/safe_control_gym/controllers/ddpg/ddpg.py +++ b/safe_control_gym/controllers/ddpg/ddpg.py @@ -1,10 +1,10 @@ -'''Deep Deterministic Policy Gradient +"""Deep Deterministic Policy Gradient Reference paper & code: * [Continuous Control with Deep Reinforcement Learning](https://arxiv.org/pdf/1509.02971.pdf) * [openai spinning up - ddpg](https://github.com/openai/spinningup/tree/master/spinup/algos/pytorch/ddpg) * [DeepRL - ddpg](https://github.com/ShangtongZhang/DeepRL/blob/master/deep_rl/agent/DDPG_agent.py) -''' +""" import os import time @@ -26,7 +26,7 @@ class DDPG(BaseController): - '''Deep Deterministic Policy Gradient.''' + """Deep Deterministic Policy Gradient.""" def __init__(self, env_func, @@ -85,7 +85,7 @@ def __init__(self, self.logger = ExperimentLogger(output_dir, log_file_out=log_file_out, use_tensorboard=use_tensorboard) def reset(self): - '''Prepares for training or testing.''' + """Prepares for training or testing.""" if self.training: # set up stats tracking self.env.add_tracker('constraint_violation', 0) @@ -107,14 +107,14 @@ def reset(self): self.env.add_tracker('mse', 0, mode='queue') def close(self): - '''Shuts down and cleans up lingering resources.''' + """Shuts down and cleans up lingering resources.""" self.env.close() if self.training: self.eval_env.close() self.logger.close() def save(self, path, save_buffer=True): - '''Saves model params and experiment state to checkpoint path.''' + """Saves model params and experiment state to checkpoint path.""" path_dir = os.path.dirname(path) os.makedirs(path_dir, exist_ok=True) @@ -141,7 +141,7 @@ def save(self, path, save_buffer=True): torch.save(state_dict, path) def load(self, path): - '''Restores model and experiment given checkpoint path.''' + """Restores model and experiment given checkpoint path.""" state = torch.load(path) # restore params @@ -162,7 +162,7 @@ def load(self, path): self.logger.load(self.total_steps) def learn(self, env=None, **kwargs): - '''Performs learning (pre-training, training, fine-tuning, etc).''' + """Performs learning (pre-training, training, fine-tuning, etc.).""" if self.num_checkpoints > 0: step_interval = np.linspace(0, self.max_env_steps, self.num_checkpoints) interval_save = np.zeros_like(step_interval, dtype=bool) @@ -204,7 +204,7 @@ def learn(self, env=None, **kwargs): self.log_step(results) def select_action(self, obs, info=None): - '''Determine the action to take at the current timestep. + """Determine the action to take at the current timestep. 
Args: obs (ndarray): The observation at this timestep. @@ -212,7 +212,7 @@ def select_action(self, obs, info=None): Returns: action (ndarray): The action chosen by the controller. - ''' + """ with torch.no_grad(): obs = torch.FloatTensor(obs).to(self.device) @@ -221,7 +221,7 @@ def select_action(self, obs, info=None): return action def run(self, env=None, render=False, n_episodes=10, verbose=False, **kwargs): - '''Runs evaluation with current policy.''' + """Runs evaluation with current policy.""" self.agent.eval() self.obs_normalizer.set_read_only() if env is None: @@ -269,7 +269,7 @@ def run(self, env=None, render=False, n_episodes=10, verbose=False, **kwargs): return eval_results def train_step(self, **kwargs): - '''Performs a training step.''' + """Performs a training step.""" self.agent.train() self.obs_normalizer.unset_read_only() obs = self.obs @@ -341,7 +341,7 @@ def train_step(self, **kwargs): return results def log_step(self, results): - '''Does logging after a training step.''' + """Does logging after a training step.""" step = results['step'] # runner stats self.logger.add_scalars( @@ -371,6 +371,7 @@ def log_step(self, results): { 'ep_length': ep_lengths.mean(), 'ep_return': ep_returns.mean(), + 'ep_return_std': ep_returns.std(), 'ep_reward': (ep_returns / ep_lengths).mean(), 'ep_constraint_violation': ep_constraint_violation.mean() }, @@ -390,6 +391,7 @@ def log_step(self, results): { 'ep_length': eval_ep_lengths.mean(), 'ep_return': eval_ep_returns.mean(), + 'ep_return_std': eval_ep_returns.std(), 'ep_reward': (eval_ep_returns / eval_ep_lengths).mean(), 'constraint_violation': eval_constraint_violation.mean(), 'mse': eval_mse.mean() diff --git a/safe_control_gym/controllers/ddpg/ddpg_utils.py b/safe_control_gym/controllers/ddpg/ddpg_utils.py index ea8a98a7d..705d69691 100644 --- a/safe_control_gym/controllers/ddpg/ddpg_utils.py +++ b/safe_control_gym/controllers/ddpg/ddpg_utils.py @@ -7,6 +7,8 @@ from safe_control_gym.controllers.sac.sac_utils import SACBuffer, soft_update from safe_control_gym.math_and_models.neural_networks import MLP +from safe_control_gym.math_and_models.random_processes import OrnsteinUhlenbeckProcess +from safe_control_gym.math_and_models.schedule import LinearSchedule # ----------------------------------------------------------------------------------- # Agent @@ -14,7 +16,7 @@ class DDPGAgent: - '''A DDPG class that encapsulates model, optimizer and update functions.''' + """A DDPG class that encapsulates model, optimizer and update functions.""" def __init__(self, obs_space, @@ -46,20 +48,20 @@ def __init__(self, self.critic_opt = torch.optim.Adam(self.ac.q.parameters(), critic_lr) def to(self, device): - '''Puts agent to device.''' + """Puts agent to device.""" self.ac.to(device) self.ac_targ.to(device) def train(self): - '''Sets training mode.''' + """Sets training mode.""" self.ac.train() def eval(self): - '''Sets evaluation mode.''' + """Sets evaluation mode.""" self.ac.eval() def state_dict(self): - '''Snapshots agent state.''' + """Snapshots agent state.""" return { 'ac': self.ac.state_dict(), 'ac_targ': self.ac_targ.state_dict(), @@ -68,14 +70,14 @@ def state_dict(self): } def load_state_dict(self, state_dict): - '''Restores agent state.''' + """Restores agent state.""" self.ac.load_state_dict(state_dict['ac']) self.ac_targ.load_state_dict(state_dict['ac_targ']) self.actor_opt.load_state_dict(state_dict['actor_opt']) self.critic_opt.load_state_dict(state_dict['critic_opt']) def compute_policy_loss(self, batch): - '''Returns policy loss(es) 
given batch of data.''' + """Returns policy loss(es) given batch of data.""" obs = batch['obs'] act = self.ac.actor(obs) q = self.ac.q(obs, act) @@ -83,7 +85,7 @@ def compute_policy_loss(self, batch): return policy_loss def compute_q_loss(self, batch): - '''Returns q-value loss(es) given batch of data.''' + """Returns q-value loss(es) given batch of data.""" obs, act, rew, next_obs, mask = batch['obs'], batch['act'], batch['rew'], batch['next_obs'], batch['mask'] q = self.ac.q(obs, act) @@ -97,7 +99,7 @@ def compute_q_loss(self, batch): return critic_loss def update(self, batch): - '''Updates model parameters based on current training batch.''' + """Updates model parameters based on current training batch.""" results = defaultdict(list) # actor update @@ -150,7 +152,7 @@ def forward(self, obs, act): class MLPActorCritic(nn.Module): - '''Model for the actor-critic agent.''' + """Model for the actor-critic agent.""" def __init__(self, obs_space, act_space, hidden_dims=(64, 64), activation='relu'): super().__init__() @@ -180,14 +182,14 @@ def act(self, obs, **kwargs): # ----------------------------------------------------------------------------------- class DDPGBuffer(SACBuffer): - '''Storage for replay buffer during training. + """Storage for replay buffer during training. Attributes: max_size (int): maximum size of the replay buffer. batch_size (int): number of samples (steps) per batch. - scheme (dict): describs shape & other info of data to be stored. + scheme (dict): describes shape & other info of data to be stored. keys (list): names of all data from scheme. - ''' + """ def __init__(self, obs_space, act_space, max_size, batch_size=None): self.max_size = max_size @@ -224,7 +226,7 @@ def __init__(self, obs_space, act_space, max_size, batch_size=None): # ----------------------------------------------------------------------------------- def make_action_noise_process(noise_config, act_space): - '''Construct a process for generating action noise during agent training.''' + """Construct a process for generating action noise during agent training.""" process_func = noise_config.pop('func') std_config = noise_config.pop('std') diff --git a/safe_control_gym/controllers/lqr/lqr_utils.py b/safe_control_gym/controllers/lqr/lqr_utils.py index 939f068a5..9bcd5919e 100644 --- a/safe_control_gym/controllers/lqr/lqr_utils.py +++ b/safe_control_gym/controllers/lqr/lqr_utils.py @@ -91,4 +91,4 @@ def get_cost_weight_matrix(weights, dim): W = np.diag(weights * dim) else: raise Exception('Wrong dimension for cost weights.') - return W \ No newline at end of file + return W diff --git a/safe_control_gym/controllers/mpc/gp_mpc.py b/safe_control_gym/controllers/mpc/gp_mpc.py index f30de4099..143e63e02 100644 --- a/safe_control_gym/controllers/mpc/gp_mpc.py +++ b/safe_control_gym/controllers/mpc/gp_mpc.py @@ -16,7 +16,8 @@ and the inducing points are the previous MPC solution. 3. Each dimension of the learned error dynamics is an independent Zero Mean SE Kernel GP. 
''' -import time, os +import os +import time from copy import deepcopy from functools import partial @@ -30,10 +31,10 @@ from sklearn.model_selection import train_test_split from skopt.sampler import Lhs +from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system from safe_control_gym.controllers.mpc.gp_utils import (GaussianProcessCollection, ZeroMeanIndependentGPModel, covMatern52ard, covSEard, kmeans_centriods) from safe_control_gym.controllers.mpc.linear_mpc import MPC, LinearMPC -from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system from safe_control_gym.envs.benchmark_env import Task @@ -311,7 +312,7 @@ def precompute_probabilistic_limits(self, if self.x_prev is not None and self.u_prev is not None: # cov_x = np.zeros((nx, nx)) cov_x = np.diag([self.initial_rollout_std**2] * nx) - z_batch = np.hstack((self.x_prev[:, :-1].T, self.u_prev.T)) # (T, input_dim) + z_batch = np.hstack((self.x_prev[:, :-1].T, self.u_prev.T)) # (T, input_dim) # Compute the covariance of the dynamics at each time step. time_before = time.time() _, cov_d_tensor_batch = self.gaussian_process.predict(z_batch, return_pred=False) @@ -331,7 +332,7 @@ def precompute_probabilistic_limits(self, # TODO: Addition of noise here! And do we still need initial_rollout_std # _, cov_d_tensor = self.gaussian_process.predict(z[None, :], return_pred=False) # cov_d = cov_d_tensor.detach().numpy() - if False: # if self.sparse_gp: + if False: # if self.sparse_gp: dim_gp_outputs = len(self.target_mask) cov_d = np.zeros((dim_gp_outputs, dim_gp_outputs)) K_z_z = self.gaussian_process.kernel(torch.from_numpy(z[None, self.input_mask]).double()).detach().numpy() @@ -339,9 +340,9 @@ def precompute_probabilistic_limits(self, torch.tensor(z_ind).double()).detach().numpy() for i in range(dim_gp_outputs): Q_z_z = K_z_zind[i, :, :] @ K_zind_zind_inv[i, :, :] @ K_z_zind[i, :, :].T - cov_d[i, i] = K_z_z[i, 0] - Q_z_z +\ + cov_d[i, i] = K_z_z[i, 0] - Q_z_z +\ self.K_z_zind_func(z1=z, z2=z_ind)['K'][i, :].toarray() @ Sigma_inv[i] @ self.K_z_zind_func(z1=z, z2=z_ind)['K'][i, :].T.toarray() - else: + else: cov_d = cov_d_batch[i, :, :] _, _, cov_noise, _ = self.gaussian_process.get_hyperparameters() cov_d = cov_d + np.diag(cov_noise.detach().numpy()) @@ -453,7 +454,7 @@ def setup_gp_optimizer(self, n_ind_points, solver='ipopt'): Args: n_ind_points (int): Number of inducing points. ''' - print(f'Setting up GP MPC with {solver} solver.') + print(f'Setting up GP MPC with {solver} solver.') nx, nu = self.model.nx, self.model.nu T = self.T # Define optimizer and variables. @@ -574,7 +575,7 @@ def setup_gp_optimizer(self, n_ind_points, solver='ipopt'): # 'print_time': 1, # 'expand': True, # 'verbose': True} - opts = {'expand': True,} + opts = {'expand': True, } # opti.solver('ipopt', opts) opti.solver(solver, opts) self.opti_dict = { @@ -909,14 +910,14 @@ def load(self, model_path): '''Loads a pretrained batch GP model. Args: model_path (str): Path to the pretrained model. ''' - + if not self.parallel: raise ValueError('load function only works with parallel GP models.') data = np.load(f'{model_path}/data.npz') gp_model_path = f'{model_path}/best_model.pth' self.train_gp(input_data=data['data_inputs'], target_data=data['data_targets'], gp_model=gp_model_path) print('================== GP models loaded. =================') - + def learn(self, env=None): '''Performs multiple epochs learning. 
''' @@ -984,12 +985,12 @@ def learn(self, env=None): train_runs[epoch].update({episode: munch.munchify(run_results)}) lengthscale, outputscale, noise, kern = self.gaussian_process.get_hyperparameters(as_numpy=True) - - # save training data + + # save training data np.savez(os.path.join(self.output_dir, 'data'), - data_inputs=training_results['train_inputs'], - data_targets=training_results['train_targets']) - + data_inputs=training_results['train_inputs'], + data_targets=training_results['train_targets']) + # close environments for env in train_envs: env.close() @@ -1151,7 +1152,7 @@ def compute_initial_guess(self, init_state, goal_states): opti.set_value(mean_post_factor, mean_post_factor_val) opti.set_value(z_ind, z_ind_val) - # Solve the optimization problem. + # Solve the optimization problem. try: sol = opti.solve() x_val, u_val = sol.value(x_var), sol.value(u_var) @@ -1168,9 +1169,8 @@ def compute_initial_guess(self, init_state, goal_states): self.x_prev, self.u_prev = x_val, u_val x_guess = x_val u_guess = u_val - + time_after = time.time() print('MPC _compute_initial_guess time: ', time_after - time_before) return x_guess, u_guess - \ No newline at end of file diff --git a/safe_control_gym/controllers/mpc/gp_utils.py b/safe_control_gym/controllers/mpc/gp_utils.py index 7e83d3509..fb67991b6 100644 --- a/safe_control_gym/controllers/mpc/gp_utils.py +++ b/safe_control_gym/controllers/mpc/gp_utils.py @@ -59,6 +59,7 @@ def covMatern52ard(x, r_over_l = ca.sqrt(dist) return sf2 * (1 + ca.sqrt(5) * r_over_l + 5 / 3 * r_over_l ** 2) * ca.exp(- ca.sqrt(5) * r_over_l) + def covMatern52ard(x, z, ell, @@ -80,6 +81,7 @@ def covMatern52ard(x, r_over_l = ca.sqrt(dist) return sf2 * (1 + ca.sqrt(5) * r_over_l + 5 / 3 * r_over_l ** 2) * ca.exp(- ca.sqrt(5) * r_over_l) + class ZeroMeanIndependentMultitaskGPModel(gpytorch.models.ExactGP): '''Multidimensional Gaussian Process model with zero mean function. @@ -110,8 +112,8 @@ def __init__(self, # For constant mean function. if kernel == 'RBF': self.covar_module = gpytorch.kernels.ScaleKernel( - gpytorch.kernels.RBFKernel(batch_shape=torch.Size([self.n]), - ard_num_dims=train_x.shape[1]), + gpytorch.kernels.RBFKernel(batch_shape=torch.Size([self.n]), + ard_num_dims=train_x.shape[1]), batch_shape=torch.Size([self.n]), ard_num_dims=train_x.shape[1] ) @@ -119,9 +121,9 @@ def __init__(self, self.covar_module = gpytorch.kernels.ScaleKernel( gpytorch.kernels.MaternKernel(batch_shape=torch.Size([self.n]), ard_num_dims=train_x.shape[1]), - batch_shape=torch.Size([self.n]), - ard_num_dims=train_x.shape[1] - ) + batch_shape=torch.Size([self.n]), + ard_num_dims=train_x.shape[1] + ) else: raise NotImplementedError @@ -313,8 +315,8 @@ def init_with_hyperparam(self, gp_K_plus_noise_inv_list.append(gp.model.K_plus_noise_inv.detach()) print('Loaded!') gp_K_plus_noise = torch.stack(gp_K_plus_noise_list) - gp_K_plus_noise_inv = torch.stack(gp_K_plus_noise_inv_list) - + gp_K_plus_noise_inv = torch.stack(gp_K_plus_noise_inv_list) + self.K_plus_noise = gp_K_plus_noise self.K_plus_noise_inv = gp_K_plus_noise_inv self.casadi_predict = self.make_casadi_predict_func() @@ -424,8 +426,8 @@ def predict(self, Return Predictions means : torch.tensor (N_samples x output DIM). - covs : torch.tensor (N_samples x output DIM x output DIM). - NOTE: For compatibility with the original implementation, + covs : torch.tensor (N_samples x output DIM x output DIM). + NOTE: For compatibility with the original implementation, the output will be squeezed when N_samples == 1. 
''' num_batch = x.shape[0] @@ -502,12 +504,12 @@ def make_casadi_linearized_predict_func(self): dmu[:, gp_ind] = gp.casadi_linearized_predict(z=z)['mean'] A, B = dmu.T[:, :Ny], dmu.T[:, Ny:] assert A.shape == (Ny, Ny), ValueError('A matrix has wrong shape.') - assert B.shape == (Ny, Nz-Ny), ValueError('B matrix has wrong shape.') + assert B.shape == (Ny, Nz - Ny), ValueError('B matrix has wrong shape.') casadi_lineaized_predict = ca.Function('linearized_pred', - [z], - [dmu, A, B], - ['z'], - ['mean', 'A', 'B']) + [z], + [dmu, A, B], + ['z'], + ['mean', 'A', 'B']) return casadi_lineaized_predict def prediction_jacobian(self, @@ -788,8 +790,8 @@ def predict(self, Returns: Predictions means : torch.tensor (N_samples x output DIM). - covs : torch.tensor (N_samples x output DIM x output DIM). - NOTE: For compatibility with the original implementation, + covs : torch.tensor (N_samples x output DIM x output DIM). + NOTE: For compatibility with the original implementation, the output will be squeezed when N_samples == 1. ''' @@ -1063,7 +1065,6 @@ def train(self, self.casadi_linearized_predict = \ self.make_casadi_linearized_prediction_func(train_x, train_y) - def predict(self, x, requires_grad=False, @@ -1135,7 +1136,7 @@ def make_casadi_prediction_func(self, train_inputs, train_targets): ['z'], ['mean']) return predict - + # def make_se_kernel_derivative_func(self, # train_x): # '''Get the derivative of the SE kernel with respect to the input. @@ -1156,7 +1157,7 @@ def make_casadi_prediction_func(self, train_inputs, train_targets): # M_inv = ca.DM(M_inv) # assert M.shape[0] == train_x.shape[1], ValueError('Mismatch in input dimensions') # Nx = len(self.input_mask) # number of input dimension - # num_data = train_x.shape[0] + # num_data = train_x.shape[0] # z = ca.SX.sym('z', Nx) # query point # # compute 1st derivative of the kernel (8) # dkdx = ca.SX.zeros(Nx, num_data) @@ -1166,7 +1167,7 @@ def make_casadi_prediction_func(self, train_inputs, train_targets): # dkdx = M_inv**2 @ dkdx # # compute 2nd derivative of the kernel (9) # d2kdx2 = M_inv**2 * output_scale ** 2 - + # dkdx_func = ca.Function('dkdx', # [z], # [dkdx], @@ -1178,7 +1179,7 @@ def make_casadi_prediction_func(self, train_inputs, train_targets): # ['z'], # ['d2kdx2']) # return dkdx_func, d2kdx2_func - + def make_casadi_linearized_prediction_func(self, train_inputs, train_targets): '''Get the linearized prediction casadi function. See Berkenkamp and Schoellig, 2015, eq. 
(8) (9) for the derivative @@ -1200,42 +1201,42 @@ def make_casadi_linearized_prediction_func(self, train_inputs, train_targets): M_inv = ca.DM(M_inv) assert M.shape[0] == train_inputs.shape[1], ValueError('Mismatch in input dimensions') num_data = train_inputs.shape[0] - z = ca.SX.sym('z', len(self.input_mask)) # query point + z = ca.SX.sym('z', len(self.input_mask)) # query point # compute 1st derivative of the kernel (8) dkdx = ca.SX.zeros(len(self.input_mask), num_data) for i in range(num_data): dkdx[:, i] = (train_inputs[i] - z) * \ - covSEard(z, train_inputs[i].T, lengthscale.T, output_scale) + covSEard(z, train_inputs[i].T, lengthscale.T, output_scale) dkdx = M_inv**2 @ dkdx # compute 2nd derivative of the kernel (9) - d2kdx2 = M_inv**2 * output_scale ** 2 - + d2kdx2 = M_inv**2 * output_scale ** 2 + dkdx_func = ca.Function('dkdx', [z], [dkdx], ['z'], ['dkdx']) d2kdx2_func = ca.Function('d2kdx2', - [z], - [d2kdx2], - ['z'], - ['d2kdx2']) + [z], + [d2kdx2], + ['z'], + ['d2kdx2']) mean = dkdx_func(z) \ - @ self.model.K_plus_noise_inv.detach().numpy() @ train_targets + @ self.model.K_plus_noise_inv.detach().numpy() @ train_targets linearized_predict = ca.Function('linearized_predict', - [z], - [mean], - ['z'], - ['mean']) + [z], + [mean], + ['z'], + ['mean']) return linearized_predict # def linearized_prediction(self, - # x, + # x, # requires_grad=False, # return_pred=True # ): # ''' - # Linearized predictions: + # Linearized predictions: # See Berkenkamp and Schoellig, 2015, eq. (10) (11). # Args: diff --git a/safe_control_gym/controllers/mpc/gpmpc_acados.py b/safe_control_gym/controllers/mpc/gpmpc_acados.py index 761501635..7711400ac 100644 --- a/safe_control_gym/controllers/mpc/gpmpc_acados.py +++ b/safe_control_gym/controllers/mpc/gpmpc_acados.py @@ -1,6 +1,5 @@ - import time from copy import deepcopy from functools import partial @@ -10,22 +9,24 @@ import numpy as np import scipy import torch +from acados_template import AcadosModel, AcadosOcp, AcadosOcpSolver, AcadosSimSolver from sklearn.metrics import pairwise_distances_argmin_min from sklearn.model_selection import train_test_split from skopt.sampler import Lhs from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system +from safe_control_gym.controllers.mpc.gp_mpc import GPMPC from safe_control_gym.controllers.mpc.gp_utils import (GaussianProcessCollection, ZeroMeanIndependentGPModel, covSEard, kmeans_centriods) from safe_control_gym.controllers.mpc.linear_mpc import MPC, LinearMPC from safe_control_gym.controllers.mpc.mpc import MPC -from safe_control_gym.controllers.mpc.gp_mpc import GPMPC # from safe_control_gym.controllers.mpc.sqp_mpc import SQPMPC from safe_control_gym.envs.benchmark_env import Task -from acados_template import AcadosOcp, AcadosOcpSolver, AcadosSimSolver, AcadosModel + class GPMPC_ACADOS(GPMPC): '''Implements a GP-MPC controller with Acados optimization.''' + def __init__( self, env_func, @@ -46,8 +47,8 @@ def __init__( use_gpu: bool = False, gp_model_path: str = None, n_ind_points: int = 30, - inducing_point_selection_method = 'kmeans', - recalc_inducing_points_at_every_step = False, + inducing_point_selection_method='kmeans', + recalc_inducing_points_at_every_step=False, prob: float = 0.955, initial_rollout_std: float = 0.005, input_mask: list = None, @@ -64,7 +65,7 @@ def __init__( use_RTI: bool = False, **kwargs ): - + if prior_info is None or prior_info == {}: raise ValueError('GPMPC_ACADOS requires prior_prop to be defined. 
You may use the real mass properties and then use prior_param_coeff to modify them accordingly.') prior_info['prior_prop'].update((prop, val * prior_param_coeff) for prop, val in prior_info['prior_prop'].items()) @@ -96,38 +97,38 @@ def __init__( self.sparse_gp = sparse_gp # super().__init__() # TODO: check the inheritance of the class super().__init__( - env_func = env_func, - seed= seed, - horizon = horizon, - q_mpc = q_mpc, - r_mpc = r_mpc, - constraint_tol = constraint_tol, - additional_constraints = additional_constraints, - soft_constraints = soft_constraints, - warmstart = warmstart, - train_iterations = train_iterations, - test_data_ratio = test_data_ratio, - overwrite_saved_data = overwrite_saved_data, - optimization_iterations = optimization_iterations, - learning_rate = learning_rate, - normalize_training_data = normalize_training_data, - use_gpu = use_gpu, - gp_model_path = gp_model_path, - prob = prob, - initial_rollout_std = initial_rollout_std, - input_mask = input_mask, - target_mask = target_mask, - gp_approx = gp_approx, - sparse_gp = sparse_gp, - n_ind_points = n_ind_points, - inducing_point_selection_method = 'kmeans', - recalc_inducing_points_at_every_step = False, - online_learning = online_learning, - prior_info = prior_info, + env_func=env_func, + seed=seed, + horizon=horizon, + q_mpc=q_mpc, + r_mpc=r_mpc, + constraint_tol=constraint_tol, + additional_constraints=additional_constraints, + soft_constraints=soft_constraints, + warmstart=warmstart, + train_iterations=train_iterations, + test_data_ratio=test_data_ratio, + overwrite_saved_data=overwrite_saved_data, + optimization_iterations=optimization_iterations, + learning_rate=learning_rate, + normalize_training_data=normalize_training_data, + use_gpu=use_gpu, + gp_model_path=gp_model_path, + prob=prob, + initial_rollout_std=initial_rollout_std, + input_mask=input_mask, + target_mask=target_mask, + gp_approx=gp_approx, + sparse_gp=sparse_gp, + n_ind_points=n_ind_points, + inducing_point_selection_method='kmeans', + recalc_inducing_points_at_every_step=False, + online_learning=online_learning, + prior_info=prior_info, # inertial_prop: list = [1.0], - prior_param_coeff = prior_param_coeff, - terminate_run_on_done = terminate_run_on_done, - output_dir = output_dir, + prior_param_coeff=prior_param_coeff, + terminate_run_on_done=terminate_run_on_done, + output_dir=output_dir, **kwargs) # self.prior_ctrl = LinearMPC( # self.prior_env_func, @@ -153,7 +154,7 @@ def __init__( self.data_inputs = None self.data_targets = None self.prior_dynamics_func = self.prior_ctrl.linear_dynamics_func - # self.prior_dynamics_func = self.prior_ctrl.dynamics_func # nonlinear prior + # self.prior_dynamics_func = self.prior_ctrl.dynamics_func # nonlinear prior self.X_EQ = self.prior_ctrl.X_EQ self.U_EQ = self.prior_ctrl.U_EQ # GP and training parameters. 
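For context on the `prob` value passed through above: the chance-constraint tightening it ultimately feeds into can be sketched as pulling a linear constraint a^T x <= b in by norm.ppf(prob) * sqrt(a^T Sigma a) under a Gaussian state estimate. This is only a minimal illustration of the idea behind precompute_probabilistic_limits; the helper name and shapes below are hypothetical, not the repository's API.

import numpy as np
from scipy.stats import norm

def tighten_linear_constraint(a, b, Sigma, prob=0.955):
    # Chance constraint P(a^T x <= b) >= prob for x ~ N(mu, Sigma)
    # holds if the mean satisfies a^T mu <= b - backoff, with:
    backoff = norm.ppf(prob) * np.sqrt(a @ Sigma @ a)
    return b - backoff

# Example: position bound x0 <= 1.0 with a diagonal state covariance.
a = np.array([1.0, 0.0])
Sigma = np.diag([0.01, 0.04])
print(tighten_linear_constraint(a, 1.0, Sigma))  # slightly below 1.0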
@@ -197,11 +198,11 @@ def __init__( self.setup_acados_model() self.setup_acados_optimizer() self.acados_ocp_solver = AcadosOcpSolver(self.ocp) - + def setup_acados_model(self) -> AcadosModel: model_name = self.env.NAME - + acados_model = AcadosModel() acados_model.x = self.model.x_sym acados_model.u = self.model.u_sym @@ -211,18 +212,18 @@ def setup_acados_model(self) -> AcadosModel: B_lin = self.discrete_dfdu if self.gaussian_process is None: - f_disc = self.prior_dynamics_func(x0=acados_model.x- self.X_EQ, - p=acados_model.u- self.U_EQ)['xf'] \ + f_disc = self.prior_dynamics_func(x0=acados_model.x - self.X_EQ, + p=acados_model.u - self.U_EQ)['xf'] \ + self.prior_ctrl.X_EQ[:, None] else: - z = cs.vertcat(acados_model.x, acados_model.u) # GP prediction point + z = cs.vertcat(acados_model.x, acados_model.u) # GP prediction point z = z[self.input_mask] if self.sparse_gp: raise NotImplementedError('Sparse GP not implemented for acados.') else: - f_disc = self.prior_dynamics_func(x0=acados_model.x- self.X_EQ, - p=acados_model.u- self.U_EQ)['xf'] \ - + self.prior_ctrl.X_EQ[:, None] + f_disc = self.prior_dynamics_func(x0=acados_model.x - self.X_EQ, + p=acados_model.u - self.U_EQ)['xf'] \ + + self.prior_ctrl.X_EQ[:, None] + self.Bd @ self.gaussian_process.casadi_predict(z=z)['mean'] acados_model.disc_dyn_expr = f_disc @@ -246,9 +247,9 @@ def setup_acados_optimizer(self): ocp.model = self.acados_model # set dimensions - ocp.dims.N = self.T # prediction horizon + ocp.dims.N = self.T # prediction horizon - # set cost + # set cost ocp.cost.cost_type = 'LINEAR_LS' ocp.cost.cost_type_e = 'LINEAR_LS' ocp.cost.W = scipy.linalg.block_diag(self.Q, self.R) @@ -256,7 +257,7 @@ def setup_acados_optimizer(self): ocp.cost.Vx = np.zeros((ny, nx)) ocp.cost.Vx[:nx, :nx] = np.eye(nx) ocp.cost.Vu = np.zeros((ny, nu)) - ocp.cost.Vu[nx:(nx+nu), :nu] = np.eye(nu) + ocp.cost.Vu[nx:(nx + nu), :nu] = np.eye(nu) ocp.cost.Vx_e = np.eye(nx) # placeholder y_ref and y_ref_e (will be set in select_action) ocp.cost.yref = np.zeros((ny, )) @@ -292,22 +293,21 @@ def setup_acados_optimizer(self): input_constraint_expr_list.append(input_constraint(ocp.model.u)) # chance input constraint tightening input_tighten_list.append(cs.MX.sym(f'input_tighten_{ic_i}', input_constraint(ocp.model.u).shape[0], 1)) - + h_expr_list = state_constraint_expr_list + input_constraint_expr_list h_expr = cs.vertcat(*h_expr_list) h0_expr = cs.vertcat(*h_expr_list) - he_expr = cs.vertcat(*state_constraint_expr_list) # terminal constraints are only state constraints + he_expr = cs.vertcat(*state_constraint_expr_list) # terminal constraints are only state constraints # pass the constraints to the ocp object ocp = self.processing_acados_constraints_expression(ocp, h0_expr, h_expr, he_expr, state_tighten_list, input_tighten_list) # pass the tightening variables to the ocp object as parameters tighten_var = cs.vertcat(*state_tighten_list, *input_tighten_list) - ocp.model.p = tighten_var - ocp.parameter_values = np.zeros((tighten_var.shape[0], )) # dummy values + ocp.model.p = tighten_var + ocp.parameter_values = np.zeros((tighten_var.shape[0], )) # dummy values # slack costs for nonlinear constraints if self.gp_soft_constraints: raise NotImplementedError('Soft constraints not implemented for acados.') - # placeholder initial state constraint x_init = np.zeros((nx)) @@ -324,10 +324,10 @@ def setup_acados_optimizer(self): self.ocp = ocp - def processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_expr, he_expr, \ + def 
processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_expr, he_expr, state_tighten_list, input_tighten_list) -> AcadosOcp: '''Preprocess the constraints to be compatible with acados. - Args: + Args: h0_expr (casadi expression): initial state constraints h_expr (casadi expression): state and input constraints he_expr (casadi expression): terminal state constraints @@ -335,34 +335,34 @@ def processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_ex input_tighten_list (list): list of casadi SX variables for input constraint tightening Returns: ocp (AcadosOcp): acados ocp object with constraints set - + Note: all constraints in safe-control-gym are defined as g(x, u) <= constraint_tol However, acados requires the constraints to be defined as lb <= g(x, u) <= ub Thus, a large negative number (-1e8) is used as the lower bound. - See: https://github.com/acados/acados/issues/650 + See: https://github.com/acados/acados/issues/650 An alternative way to set the constraints is to use bounded constraints of acados: # bounded input constraints idxbu = np.where(np.sum(self.env.constraints.input_constraints[0].constraint_filter, axis=0) != 0)[0] ocp.constraints.Jbu = np.eye(nu) ocp.constraints.lbu = self.env.constraints.input_constraints[0].lower_bounds - ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds + ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds ocp.constraints.idxbu = idxbu # active constraints dimension ''' - # NOTE: only the upper bound is tightened due to constraint are defined in the + # NOTE: only the upper bound is tightened due to constraint are defined in the # form of g(x, u) <= constraint_tol in safe-control-gym # lambda functions to set the upper and lower bounds of the chance constraints - constraint_ub_chance = lambda constraint: -self.constraint_tol * np.ones(constraint.shape) - constraint_lb_chance = lambda constraint: -1e8 * np.ones(constraint.shape) + def constraint_ub_chance(constraint): return -self.constraint_tol * np.ones(constraint.shape) + def constraint_lb_chance(constraint): return -1e8 * np.ones(constraint.shape) state_tighten_var = cs.vertcat(*state_tighten_list) input_tighten_var = cs.vertcat(*input_tighten_list) - - ub = {'h': constraint_ub_chance(h_expr - cs.vertcat(state_tighten_var, input_tighten_var)), \ - 'h0': constraint_ub_chance(h0_expr - cs.vertcat(state_tighten_var, input_tighten_var)),\ + + ub = {'h': constraint_ub_chance(h_expr - cs.vertcat(state_tighten_var, input_tighten_var)), + 'h0': constraint_ub_chance(h0_expr - cs.vertcat(state_tighten_var, input_tighten_var)), 'he': constraint_ub_chance(he_expr - state_tighten_var)} - lb = {'h': constraint_lb_chance(h_expr), 'h0': constraint_lb_chance(h0_expr),\ + lb = {'h': constraint_lb_chance(h_expr), 'h0': constraint_lb_chance(h0_expr), 'he': constraint_lb_chance(he_expr)} # make sure all the ub and lb are 1D casaadi SX variables @@ -381,7 +381,7 @@ def processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_ex ocp.model.con_h_expr = h_expr - cs.vertcat(state_tighten_var, input_tighten_var) ocp.model.con_h_expr_e = he_expr - state_tighten_var ocp.dims.nh_0, ocp.dims.nh, ocp.dims.nh_e = \ - h0_expr.shape[0], h_expr.shape[0], he_expr.shape[0] + h0_expr.shape[0], h_expr.shape[0], he_expr.shape[0] # assign constraints upper and lower bounds ocp.constraints.uh_0 = ub['h0'] ocp.constraints.lh_0 = lb['h0'] @@ -404,15 +404,15 @@ def select_action(self, obs, info=None): self.last_action = action print('gpmpc 
acados action selection time:', time_after - time_before) return action - + def select_action_with_gp(self, obs): nx, nu = self.model.nx, self.model.nu ny = nx + nu ny_e = nx # set initial condition (0-th state) - self.acados_ocp_solver.set(0, "lbx", obs) - self.acados_ocp_solver.set(0, "ubx", obs) + self.acados_ocp_solver.set(0, 'lbx', obs) + self.acados_ocp_solver.set(0, 'ubx', obs) if self.warmstart: if self.x_guess is None or self.u_guess is None: if self.compute_ipopt_initial_guess: @@ -426,36 +426,36 @@ def select_action_with_gp(self, obs): self.u_guess = np.zeros((nu, self.T)) for idx in range(self.T + 1): init_x = self.x_guess[:, idx] - self.acados_ocp_solver.set(idx, "x", init_x) + self.acados_ocp_solver.set(idx, 'x', init_x) for idx in range(self.T): if nu == 1: init_u = np.array([self.u_guess[idx]]) else: init_u = self.u_guess[:, idx] - self.acados_ocp_solver.set(idx, "u", init_u) + self.acados_ocp_solver.set(idx, 'u', init_u) else: for idx in range(self.T + 1): - self.acados_ocp_solver.set(idx, "x", obs) + self.acados_ocp_solver.set(idx, 'x', obs) for idx in range(self.T): - self.acados_ocp_solver.set(idx, "u", np.zeros((nu,))) + self.acados_ocp_solver.set(idx, 'u', np.zeros((nu,))) # Set the probabilistic state and input constraint set limits. - # Tightening at the first step is possible if self.compute_initial_guess is used + # Tightening at the first step is possible if self.compute_initial_guess is used time_before = time.time() state_constraint_set_prev, input_constraint_set_prev = self.precompute_probabilistic_limits() time_after = time.time() print('precompute_probabilistic_limits time:', time_after - time_before) - + # for si in range(len(self.constraints.state_constraints)): # tighten initial and path constraints for idx in range(self.T): state_constraint_set = state_constraint_set_prev[0][:, idx] input_constraint_set = input_constraint_set_prev[0][:, idx] tighten_value = np.concatenate((state_constraint_set, input_constraint_set)) - self.acados_ocp_solver.set(idx, "p", tighten_value) + self.acados_ocp_solver.set(idx, 'p', tighten_value) # set terminal state constraints tighten_value = np.concatenate((state_constraint_set_prev[0][:, self.T], np.zeros((2 * nu,)))) - self.acados_ocp_solver.set(self.T, "p", tighten_value) + self.acados_ocp_solver.set(self.T, 'p', tighten_value) # print('tighten_value:', tighten_value) # print('state_constraint_set_prev[0][:, self.T]:', state_constraint_set_prev[0][:, self.T]) @@ -465,9 +465,9 @@ def select_action_with_gp(self, obs): self.traj_step += 1 for idx in range(self.T): y_ref = np.concatenate((goal_states[:, idx], np.zeros((nu,)))) - self.acados_ocp_solver.set(idx, "yref", y_ref) - y_ref_e = goal_states[:, -1] - self.acados_ocp_solver.set(self.T, "yref", y_ref_e) + self.acados_ocp_solver.set(idx, 'yref', y_ref) + y_ref_e = goal_states[:, -1] + self.acados_ocp_solver.set(self.T, 'yref', y_ref_e) # solve the optimization problem # try: @@ -477,17 +477,17 @@ def select_action_with_gp(self, obs): status = self.acados_ocp_solver.solve() # feedback phase - self.acados_ocp_solver.options_set('rti_phase', 2) + self.acados_ocp_solver.options_set('rti_phase', 2) status = self.acados_ocp_solver.solve() - + if status not in [0, 2]: self.acados_ocp_solver.print_statistics() raise Exception(f'acados returned status {status}. Exiting.') # print(f"acados returned status {status}. ") if status == 2: - print(f"acados returned status {status}. ") - - action = self.acados_ocp_solver.get(0, "u") + print(f'acados returned status {status}. 
') + + action = self.acados_ocp_solver.get(0, 'u') else: status = self.acados_ocp_solver.solve() @@ -496,15 +496,15 @@ def select_action_with_gp(self, obs): raise Exception(f'acados returned status {status}. Exiting.') # print(f"acados returned status {status}. ") if status == 2: - print(f"acados returned status {status}. ") - action = self.acados_ocp_solver.get(0, "u") + print(f'acados returned status {status}. ') + action = self.acados_ocp_solver.get(0, 'u') # except Exception as e: # print(f"========== acados solver failed with error: {e} =============") # print('using prior controller') # action = self.prior_ctrl.select_action(obs) return action - + def reset(self): '''Reset the controller before running.''' # Setup reference input. @@ -516,12 +516,12 @@ def reset(self): self.traj = self.env.X_GOAL.T self.traj_step = 0 # Dynamics model. - + if self.gaussian_process is not None: self.set_gp_dynamics_func(self.n_ind_points) self.setup_acados_model() self.setup_acados_optimizer() - # n_ind_points = self.train_data['train_targets'].shape[0] + # n_ind_points = self.train_data['train_targets'].shape[0] print('=========== Resetting prior controller ===========') self.prior_ctrl.reset() self.setup_results_dict() @@ -531,4 +531,3 @@ def reset(self): self.x_guess = None self.u_guess = None - diff --git a/safe_control_gym/controllers/mpc/mpc.py b/safe_control_gym/controllers/mpc/mpc.py index bffb64af3..5de45cf78 100644 --- a/safe_control_gym/controllers/mpc/mpc.py +++ b/safe_control_gym/controllers/mpc/mpc.py @@ -82,7 +82,7 @@ def __init__( # print(self.env.__dir__()) # print('self.env.X_GOAL', self.env.X_GOAL) - # NOTE: The naming X_EQ and U_EQ can be confusing + # NOTE: The naming X_EQ and U_EQ can be confusing self.X_EQ = self.env.X_GOAL self.U_EQ = self.env.U_GOAL self.init_solver = 'ipopt' @@ -163,7 +163,7 @@ def compute_lqr_initial_guess(self, init_state, goal_states, x_lin, u_lin): dfdx = dfdxdfdu['dfdx'].toarray() dfdu = dfdxdfdu['dfdu'].toarray() lqr_gain, _, _ = compute_discrete_lqr_gain_from_cont_linear_system(dfdx, dfdu, self.Q, self.R, self.dt) - + # initialize the guess solutions x_guess = np.zeros((self.model.nx, self.T + 1)) u_guess = np.zeros((self.model.nu, self.T)) @@ -175,20 +175,20 @@ def compute_lqr_initial_guess(self, init_state, goal_states, x_lin, u_lin): x_guess[:, i + 1, None] = self.dynamics_func(x0=x_guess[:, i], p=u)['xf'].toarray() return x_guess, u_guess - + def compute_initial_guess(self, init_state, goal_states): time_before = time.time() '''Use IPOPT to get an initial guess of the ''' self.setup_optimizer(solver=self.init_solver) opti_dict = self.opti_dict opti = opti_dict['opti'] - x_var = opti_dict['x_var'] # optimization variables - u_var = opti_dict['u_var'] # optimization variables - x_init = opti_dict['x_init'] # initial state - x_ref = opti_dict['x_ref'] # reference state/trajectory + x_var = opti_dict['x_var'] # optimization variables + u_var = opti_dict['u_var'] # optimization variables + x_init = opti_dict['x_init'] # initial state + x_ref = opti_dict['x_ref'] # reference state/trajectory # Assign the initial state. - opti.set_value(x_init, init_state) # initial state should have dim (nx,) + opti.set_value(x_init, init_state) # initial state should have dim (nx,) # Assign reference trajectory within horizon. 
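The actual assignment via get_references() follows next in the hunk; schematically, that receding-horizon windowing takes a slice of horizon+1 reference points and pads with the final goal state once the trajectory runs out. A rough stand-alone sketch (hypothetical helper, assuming an nx-by-N goal trajectory, not the repository's implementation):

import numpy as np

def reference_window(x_goal, traj_step, horizon):
    # Slice horizon+1 reference points starting at traj_step,
    # clipping indices so the last goal state is repeated at the end.
    nx, n_points = x_goal.shape
    idx = np.minimum(np.arange(traj_step, traj_step + horizon + 1), n_points - 1)
    return x_goal[:, idx]  # shape (nx, horizon + 1)

# Example: 2-state circular reference with 50 points and a horizon of 10.
t = np.linspace(0, 2 * np.pi, 50)
x_goal = np.vstack([np.cos(t), np.sin(t)])
print(reference_window(x_goal, traj_step=45, horizon=10).shape)  # (2, 11)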
goal_states = self.get_references() opti.set_value(x_ref, goal_states) @@ -207,7 +207,7 @@ def compute_initial_guess(self, init_state, goal_states): # set the solver back self.setup_optimizer(solver=self.solver) - + time_after = time.time() print('MPC _compute_initial_guess time: ', time_after - time_before) @@ -295,7 +295,7 @@ def setup_optimizer(self, solver='qrsqp'): # print(opti) # exit() - + self.opti_dict = { 'opti': opti, 'x_var': x_var, @@ -321,10 +321,10 @@ def select_action(self, time_before = time.time() opti_dict = self.opti_dict opti = opti_dict['opti'] - x_var = opti_dict['x_var'] # optimization variables - u_var = opti_dict['u_var'] # optimization variables - x_init = opti_dict['x_init'] # initial state - x_ref = opti_dict['x_ref'] # reference state/trajectory + x_var = opti_dict['x_var'] # optimization variables + u_var = opti_dict['u_var'] # optimization variables + x_init = opti_dict['x_init'] # initial state + x_ref = opti_dict['x_ref'] # reference state/trajectory # Assign the initial state. opti.set_value(x_init, obs) @@ -335,13 +335,13 @@ def select_action(self, self.traj_step += 1 if self.warmstart and self.x_prev is None and self.u_prev is None: - # x_guess, u_guess = self.compute_lqr_initial_guess(obs, goal_states, self.X_EQ, self.U_EQ) - print(f'computing initial guess with {self.init_solver}') - x_guess, u_guess = self.compute_initial_guess(obs, goal_states) - opti.set_initial(x_var, x_guess) - opti.set_initial(u_var, u_guess) # Initial guess for optimization problem. + # x_guess, u_guess = self.compute_lqr_initial_guess(obs, goal_states, self.X_EQ, self.U_EQ) + print(f'computing initial guess with {self.init_solver}') + x_guess, u_guess = self.compute_initial_guess(obs, goal_states) + opti.set_initial(x_var, x_guess) + opti.set_initial(u_var, u_guess) # Initial guess for optimization problem. 
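As a self-contained illustration of the Opti warm-start pattern used here (set_value for parameters, set_initial for decision variables), assuming only CasADi with IPOPT is available and using a trivial placeholder problem rather than the controller's actual OCP:

import casadi as ca

opti = ca.Opti()
x = opti.variable(2, 11)           # state trajectory over a 10-step horizon
x0 = opti.parameter(2)             # initial state, assigned at solve time
opti.subject_to(x[:, 0] == x0)
for k in range(10):                # placeholder "dynamics": hold the state
    opti.subject_to(x[:, k + 1] == x[:, k])
opti.minimize(ca.sumsqr(x))
opti.solver('ipopt')
opti.set_value(x0, [1.0, -0.5])
opti.set_initial(x, ca.DM.zeros(2, 11))  # a previous solution could go here
sol = opti.solve()
print(sol.value(x)[:, 0])          # approx. [1.0, -0.5]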
elif self.warmstart and self.x_prev is not None and self.u_prev is not None: - # if self.warmstart and self.x_prev is not None and self.u_prev is not None: + # if self.warmstart and self.x_prev is not None and self.u_prev is not None: # shift previous solutions by 1 step x_guess = deepcopy(self.x_prev) u_guess = deepcopy(self.u_prev) @@ -375,7 +375,7 @@ def select_action(self, u_val = opti.debug.value(u_var) x_val = opti.debug.value(x_var) skip = 8 - print('x_val: ', x_val[:,::skip]) + print('x_val: ', x_val[:, ::skip]) print('u_val: ', u_val[::skip]) self.x_prev = x_val self.u_prev = u_val diff --git a/safe_control_gym/controllers/mpc/mpc_acados.py b/safe_control_gym/controllers/mpc/mpc_acados.py index 8f9a70f71..936cb1e2f 100644 --- a/safe_control_gym/controllers/mpc/mpc_acados.py +++ b/safe_control_gym/controllers/mpc/mpc_acados.py @@ -5,6 +5,7 @@ import casadi as cs import numpy as np import scipy +from acados_template import AcadosModel, AcadosOcp, AcadosOcpSolver, AcadosSimSolver from safe_control_gym.controllers.base_controller import BaseController from safe_control_gym.controllers.mpc.mpc_utils import (compute_discrete_lqr_gain_from_cont_linear_system, @@ -13,7 +14,6 @@ from safe_control_gym.envs.benchmark_env import Task from safe_control_gym.envs.constraints import GENERAL_CONSTRAINTS, create_constraint_list -from acados_template import AcadosOcp, AcadosOcpSolver, AcadosSimSolver, AcadosModel class MPC_ACADOS(BaseController): '''MPC with full nonlinear model.''' @@ -85,10 +85,10 @@ def __init__( # print(self.env.__dir__()) # print('self.env.X_GOAL', self.env.X_GOAL) - # NOTE: The naming X_EQ and U_EQ can be confusing + # NOTE: The naming X_EQ and U_EQ can be confusing self.X_EQ = self.env.X_GOAL self.U_EQ = self.env.U_GOAL - + # warm-starting self.init_solver = 'ipopt' self.x_guess = None @@ -99,9 +99,8 @@ def __init__( self.set_dynamics_func() self.setup_acados_model() self.setup_acados_optimizer() - self.acados_ocp_solver = AcadosOcpSolver(self.ocp) # , \ - # json_file=f'acados_{self.ocp.model.name}.json') - + self.acados_ocp_solver = AcadosOcpSolver(self.ocp) # , \ + # json_file=f'acados_{self.ocp.model.name}.json') def add_constraints(self, constraints @@ -175,7 +174,7 @@ def set_dynamics_func(self): def setup_acados_model(self) -> AcadosModel: model_name = self.env.NAME - + acados_model = AcadosModel() acados_model.x = self.model.x_sym # acados_model.xdot = self.model.x_dot_acados # must be symbolic @@ -184,10 +183,10 @@ def setup_acados_model(self) -> AcadosModel: # set up rk4 (acados need symbolic expression of dynamics, not function) k1 = self.model.fc_func(acados_model.x, acados_model.u) - k2 = self.model.fc_func(acados_model.x + self.dt/2 * k1, acados_model.u) - k3 = self.model.fc_func(acados_model.x + self.dt/2 * k2, acados_model.u) + k2 = self.model.fc_func(acados_model.x + self.dt / 2 * k1, acados_model.u) + k3 = self.model.fc_func(acados_model.x + self.dt / 2 * k2, acados_model.u) k4 = self.model.fc_func(acados_model.x + self.dt * k3, acados_model.u) - f_disc = acados_model.x + self.dt/6 * (k1 + 2*k2 + 2*k3 + k4) + f_disc = acados_model.x + self.dt / 6 * (k1 + 2 * k2 + 2 * k3 + k4) acados_model.disc_dyn_expr = f_disc # f_expl = self.model.x_dot @@ -201,14 +200,14 @@ def setup_acados_model(self) -> AcadosModel: acados_model.t_label = 'time' self.acados_model = acados_model - + # def compute_lqr_initial_guess(self, init_state, goal_states, x_lin, u_lin): # '''Use LQR to get an initial guess of the ''' # dfdxdfdu = self.model.df_func(x=x_lin, u=u_lin) # dfdx = 
dfdxdfdu['dfdx'].toarray() # dfdu = dfdxdfdu['dfdu'].toarray() # lqr_gain, _, _ = compute_discrete_lqr_gain_from_cont_linear_system(dfdx, dfdu, self.Q, self.R, self.dt) - + # # initialize the guess solutions # x_guess = np.zeros((self.model.nx, self.T + 1)) # u_guess = np.zeros((self.model.nu, self.T)) @@ -219,26 +218,26 @@ def setup_acados_model(self) -> AcadosModel: # u_guess[:, i] = u # x_guess[:, i + 1, None] = self.dynamics_func(x0=x_guess[:, i], p=u)['xf'].toarray() # return x_guess, u_guess - + def compute_initial_guess(self, init_state, goal_states): time_before = time.time() '''Use IPOPT to get an initial guess of the ''' self.setup_optimizer(solver=self.init_solver) opti_dict = self.opti_dict opti = opti_dict['opti'] - x_var = opti_dict['x_var'] # optimization variables - u_var = opti_dict['u_var'] # optimization variables - x_init = opti_dict['x_init'] # initial state - x_ref = opti_dict['x_ref'] # reference state/trajectory + x_var = opti_dict['x_var'] # optimization variables + u_var = opti_dict['u_var'] # optimization variables + x_init = opti_dict['x_init'] # initial state + x_ref = opti_dict['x_ref'] # reference state/trajectory # Assign the initial state. - opti.set_value(x_init, init_state) # initial state should have dim (nx,) + opti.set_value(x_init, init_state) # initial state should have dim (nx,) # Assign reference trajectory within horizon. goal_states = self.get_references() opti.set_value(x_ref, goal_states) # if self.mode == 'tracking': # self.traj_step += 1 - # Solve the optimization problem. + # Solve the optimization problem. try: sol = opti.solve() x_val, u_val = sol.value(x_var), sol.value(u_var) @@ -263,7 +262,7 @@ def setup_acados_optimizer(self): ocp.model = self.acados_model # set dimensions - ocp.dims.N = self.T # prediction horizon + ocp.dims.N = self.T # prediction horizon # set cost (NOTE: safe-control-gym uses quadratic cost) ocp.cost.cost_type = 'LINEAR_LS' @@ -273,7 +272,7 @@ def setup_acados_optimizer(self): ocp.cost.Vx = np.zeros((ny, nx)) ocp.cost.Vx[:nx, :nx] = np.eye(nx) ocp.cost.Vu = np.zeros((ny, nu)) - ocp.cost.Vu[nx:(nx+nu), :nu] = np.eye(nu) + ocp.cost.Vu[nx:(nx + nu), :nu] = np.eye(nu) ocp.cost.Vx_e = np.eye(nx) # placeholder y_ref and y_ref_e (will be set in select_action) ocp.cost.yref = np.zeros((ny, )) @@ -284,7 +283,7 @@ def setup_acados_optimizer(self): # idxbu = np.where(np.sum(self.env.constraints.input_constraints[0].constraint_filter, axis=0) != 0)[0] # ocp.constraints.Jbu = np.eye(nu) # ocp.constraints.lbu = self.env.constraints.input_constraints[0].lower_bounds - # ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds + # ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds # ocp.constraints.idxbu = idxbu # active constraints dimension # # bounded state constraints # idxbx = np.where(np.sum(self.env.constraints.state_constraints[0].constraint_filter, axis=0) != 0)[0] @@ -309,7 +308,7 @@ def setup_acados_optimizer(self): h_expr_list = state_constraint_expr_list + input_constraint_expr_list h_expr = cs.vertcat(*h_expr_list) h0_expr = cs.vertcat(*h_expr_list) - he_expr = cs.vertcat(*state_constraint_expr_list) # terminal constraints are only state constraints + he_expr = cs.vertcat(*state_constraint_expr_list) # terminal constraints are only state constraints # pass the constraints to the ocp object ocp = self.processing_acados_constraints_expression(ocp, h0_expr, h_expr, he_expr) @@ -324,13 +323,13 @@ def setup_acados_optimizer(self): L1_pen = 1e4 ocp.cost.Zu = L2_pen * 
np.ones(h_expr.shape[0]) ocp.cost.Zl = L2_pen * np.ones(h_expr.shape[0]) - ocp.cost.zl = L1_pen * np.ones(h_expr.shape[0]) + ocp.cost.zl = L1_pen * np.ones(h_expr.shape[0]) ocp.cost.zu = L1_pen * np.ones(h_expr.shape[0]) ocp.cost.Zl_e = L2_pen * np.ones(he_expr.shape[0]) ocp.cost.Zu_e = L2_pen * np.ones(he_expr.shape[0]) ocp.cost.zl_e = L1_pen * np.ones(he_expr.shape[0]) ocp.cost.zu_e = L1_pen * np.ones(he_expr.shape[0]) - + # placeholder initial state constraint x_init = np.zeros((nx)) ocp.constraints.x0 = x_init @@ -348,34 +347,34 @@ def setup_acados_optimizer(self): def processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_expr, he_expr) -> AcadosOcp: '''Preprocess the constraints to be compatible with acados. - Args: + Args: h0_expr (casadi expression): initial state constraints h_expr (casadi expression): state and input constraints he_expr (casadi expression): terminal state constraints Returns: ocp (AcadosOcp): acados ocp object with constraints set. - + Note: all constraints in safe-control-gym are defined as g(x, u) <= constraint_tol However, acados requires the constraints to be defined as lb <= g(x, u) <= ub Thus, a large negative number (-1e8) is used as the lower bound. - See: https://github.com/acados/acados/issues/650 + See: https://github.com/acados/acados/issues/650 An alternative way to set the constraints is to use bounded constraints of acados: # bounded input constraints idxbu = np.where(np.sum(self.env.constraints.input_constraints[0].constraint_filter, axis=0) != 0)[0] ocp.constraints.Jbu = np.eye(nu) ocp.constraints.lbu = self.env.constraints.input_constraints[0].lower_bounds - ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds + ocp.constraints.ubu = self.env.constraints.input_constraints[0].upper_bounds ocp.constraints.idxbu = idxbu # active constraints dimension ''' # lambda functions to set the upper and lower bounds of the constraints - constraint_ub = lambda constraint: -self.constraint_tol * np.ones(constraint.shape) - constraint_lb = lambda constraint: -1e8 * np.ones(constraint.shape) + def constraint_ub(constraint): return -self.constraint_tol * np.ones(constraint.shape) + def constraint_lb(constraint): return -1e8 * np.ones(constraint.shape) ub = {'h': constraint_ub(h_expr), 'h0': constraint_ub(h0_expr), 'he': constraint_ub(he_expr)} lb = {'h': constraint_lb(h_expr), 'h0': constraint_lb(h0_expr), 'he': constraint_lb(he_expr)} - # make sure all the ub and lb are 1D numpy arrays + # make sure all the ub and lb are 1D numpy arrays # (see: https://discourse.acados.org/t/infeasible-qps-when-using-nonlinear-casadi-constraint-expressions/1595/5?u=mxche) for key in ub.keys(): ub[key] = ub[key].flatten() if ub[key].ndim != 1 else ub[key] @@ -388,9 +387,9 @@ def processing_acados_constraints_expression(self, ocp: AcadosOcp, h0_expr, h_ex # pass the constraints to the ocp object ocp.model.con_h_expr_0, ocp.model.con_h_expr, ocp.model.con_h_expr_e = \ - h0_expr, h_expr, he_expr + h0_expr, h_expr, he_expr ocp.dims.nh_0, ocp.dims.nh, ocp.dims.nh_e = \ - h0_expr.shape[0], h_expr.shape[0], he_expr.shape[0] + h0_expr.shape[0], h_expr.shape[0], he_expr.shape[0] # assign constraints upper and lower bounds ocp.constraints.uh_0 = ub['h0'] ocp.constraints.lh_0 = lb['h0'] @@ -420,30 +419,30 @@ def select_action(self, ny = nx + nu ny_e = nx # set initial condition (0-th state) - self.acados_ocp_solver.set(0, "lbx", obs) - self.acados_ocp_solver.set(0, "ubx", obs) + self.acados_ocp_solver.set(0, 'lbx', obs) + 
self.acados_ocp_solver.set(0, 'ubx', obs) time_after_init = time.time() # warm-starting solver (otherwise, zeros by default) time_before_warmstart = time.time() if self.warmstart: - if self.x_guess is None or self.u_guess is None: - # compute initial guess with IPOPT - self.compute_initial_guess(obs, self.get_references()) + if self.x_guess is None or self.u_guess is None: + # compute initial guess with IPOPT + self.compute_initial_guess(obs, self.get_references()) for idx in range(self.T + 1): init_x = self.x_guess[:, idx] - self.acados_ocp_solver.set(idx, "x", init_x) + self.acados_ocp_solver.set(idx, 'x', init_x) for idx in range(self.T): if nu == 1: init_u = np.array([self.u_guess[idx]]) else: init_u = self.u_guess[:, idx] - self.acados_ocp_solver.set(idx, "u", init_u) + self.acados_ocp_solver.set(idx, 'u', init_u) else: for idx in range(self.T + 1): - self.acados_ocp_solver.set(idx, "x", obs) + self.acados_ocp_solver.set(idx, 'x', obs) for idx in range(self.T): - self.acados_ocp_solver.set(idx, "u", np.zeros((nu,))) + self.acados_ocp_solver.set(idx, 'u', np.zeros((nu,))) time_after_warmstart = time.time() # set reference for the control horizon @@ -452,17 +451,17 @@ def select_action(self, time_after_get_ref = time.time() if self.mode == 'tracking': self.traj_step += 1 - + y_ref = np.concatenate((goal_states[:, :-1], np.zeros((nu, self.T)))) time_before_for_loop = time.time() - for idx in range(self.T): - self.acados_ocp_solver.set(idx, "yref", y_ref[:, idx]) + for idx in range(self.T): + self.acados_ocp_solver.set(idx, 'yref', y_ref[:, idx]) # y_ref = np.concatenate((goal_states[:, idx], np.zeros((nu,)))) # self.acados_ocp_solver.set(idx, "yref", y_ref) time_after_for_loop = time.time() time_before_set_final_ref = time.time() - y_ref_e = goal_states[:, -1] - self.acados_ocp_solver.set(self.T, "yref", y_ref_e) + y_ref_e = goal_states[:, -1] + self.acados_ocp_solver.set(self.T, 'yref', y_ref_e) time_after_set_final_ref = time.time() # solve the optimization problem @@ -475,7 +474,7 @@ def select_action(self, # feedback phase time_before_feedback = time.time() - self.acados_ocp_solver.options_set('rti_phase', 2) + self.acados_ocp_solver.options_set('rti_phase', 2) status = self.acados_ocp_solver.solve() time_after_feedback = time.time() @@ -484,9 +483,9 @@ def select_action(self, raise Exception(f'acados returned status {status}. Exiting.') # print(f"acados returned status {status}. ") if status == 2: - print(f"acados returned status {status}. ") - - action = self.acados_ocp_solver.get(0, "u") + print(f'acados returned status {status}. ') + + action = self.acados_ocp_solver.get(0, 'u') elif not self.use_RTI: status = self.acados_ocp_solver.solve() @@ -495,17 +494,17 @@ def select_action(self, raise Exception(f'acados returned status {status}. Exiting.') # print(f"acados returned status {status}. ") if status == 2: - print(f"acados returned status {status}. ") - action = self.acados_ocp_solver.get(0, "u") + print(f'acados returned status {status}. 
') + action = self.acados_ocp_solver.get(0, 'u') # get the open-loop solution time_before_saving = time.time() self.x_prev = np.zeros((nx, self.T + 1)) self.u_prev = np.zeros((nu, self.T)) for i in range(self.T + 1): - self.x_prev[:, i] = self.acados_ocp_solver.get(i, "x") + self.x_prev[:, i] = self.acados_ocp_solver.get(i, 'x') for i in range(self.T): - self.u_prev[:, i] = self.acados_ocp_solver.get(i, "u") + self.u_prev[:, i] = self.acados_ocp_solver.get(i, 'u') if nu == 1: self.u_prev = self.u_prev.flatten() @@ -518,7 +517,6 @@ def select_action(self, self.prev_action = action time_after_saving = time.time() - time_after = time.time() print('Initialization time: ', time_after_init - time_before_init) print('Warm-starting time: ', time_after_warmstart - time_before_warmstart) diff --git a/safe_control_gym/controllers/mpc/sqp_gp_mpc.py b/safe_control_gym/controllers/mpc/sqp_gp_mpc.py index 646f3668f..e123b0854 100644 --- a/safe_control_gym/controllers/mpc/sqp_gp_mpc.py +++ b/safe_control_gym/controllers/mpc/sqp_gp_mpc.py @@ -1,6 +1,5 @@ - import time from copy import deepcopy from functools import partial @@ -15,16 +14,18 @@ from skopt.sampler import Lhs from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system +from safe_control_gym.controllers.mpc.gp_mpc import GPMPC from safe_control_gym.controllers.mpc.gp_utils import (GaussianProcessCollection, ZeroMeanIndependentGPModel, covSEard, kmeans_centriods) from safe_control_gym.controllers.mpc.linear_mpc import MPC, LinearMPC from safe_control_gym.controllers.mpc.mpc import MPC -from safe_control_gym.controllers.mpc.gp_mpc import GPMPC from safe_control_gym.controllers.mpc.sqp_mpc import SQPMPC from safe_control_gym.envs.benchmark_env import Task + class SQPGPMPC(GPMPC): '''Implements a GP-MPC controller with SQP optimization.''' + def __init__( self, env_func, @@ -57,7 +58,7 @@ def __init__( output_dir: str = 'results/temp', **kwargs ): - + if prior_info is None or prior_info == {}: raise ValueError('SQPGPMPC requires prior_prop to be defined. 
You may use the real mass properties and then use prior_param_coeff to modify them accordingly.') prior_info['prior_prop'].update((prop, val * prior_param_coeff) for prop, val in prior_info['prior_prop'].items()) @@ -103,51 +104,51 @@ def __init__( # self.prior_ctrl.reset() # super().__init__() # TODO: check the inheritance of the class super().__init__( - env_func = env_func, - seed= seed, - horizon = horizon, - q_mpc = q_mpc, - r_mpc = r_mpc, - constraint_tol = constraint_tol, - additional_constraints = additional_constraints, - soft_constraints = soft_constraints, - warmstart = warmstart, - train_iterations = train_iterations, - test_data_ratio = test_data_ratio, - overwrite_saved_data = overwrite_saved_data, - optimization_iterations = optimization_iterations, - learning_rate = learning_rate, - normalize_training_data = normalize_training_data, - use_gpu = use_gpu, - gp_model_path = gp_model_path, - prob = prob, - initial_rollout_std = initial_rollout_std, - input_mask = input_mask, - target_mask = target_mask, - gp_approx = gp_approx, - sparse_gp = False, - n_ind_points = 50, - inducing_point_selection_method = 'kmeans', - recalc_inducing_points_at_every_step = False, - online_learning = online_learning, - prior_info = prior_info, + env_func=env_func, + seed=seed, + horizon=horizon, + q_mpc=q_mpc, + r_mpc=r_mpc, + constraint_tol=constraint_tol, + additional_constraints=additional_constraints, + soft_constraints=soft_constraints, + warmstart=warmstart, + train_iterations=train_iterations, + test_data_ratio=test_data_ratio, + overwrite_saved_data=overwrite_saved_data, + optimization_iterations=optimization_iterations, + learning_rate=learning_rate, + normalize_training_data=normalize_training_data, + use_gpu=use_gpu, + gp_model_path=gp_model_path, + prob=prob, + initial_rollout_std=initial_rollout_std, + input_mask=input_mask, + target_mask=target_mask, + gp_approx=gp_approx, + sparse_gp=False, + n_ind_points=50, + inducing_point_selection_method='kmeans', + recalc_inducing_points_at_every_step=False, + online_learning=online_learning, + prior_info=prior_info, # inertial_prop: list = [1.0], - prior_param_coeff = prior_param_coeff, - terminate_run_on_done = terminate_run_on_done, - output_dir = output_dir, + prior_param_coeff=prior_param_coeff, + terminate_run_on_done=terminate_run_on_done, + output_dir=output_dir, **kwargs) self.prior_ctrl = SQPMPC( - env_func = self.prior_env_func, - seed= seed, - horizon = horizon, - q_mpc = q_mpc, - r_mpc = r_mpc, - warmstart= warmstart, - soft_constraints= self.soft_constraints_params['prior_soft_constraints'], - terminate_run_on_done= terminate_run_on_done, - prior_info= prior_info, - output_dir= output_dir, - additional_constraints= additional_constraints) + env_func=self.prior_env_func, + seed=seed, + horizon=horizon, + q_mpc=q_mpc, + r_mpc=r_mpc, + warmstart=warmstart, + soft_constraints=self.soft_constraints_params['prior_soft_constraints'], + terminate_run_on_done=terminate_run_on_done, + prior_info=prior_info, + output_dir=output_dir, + additional_constraints=additional_constraints) # self.prior_ctrl = LinearMPC( # self.prior_env_func, # horizon=horizon, @@ -172,7 +173,7 @@ def __init__( self.data_inputs = None self.data_targets = None # self.prior_dynamics_func = self.prior_ctrl.linear_dynamics_func - self.prior_dynamics_func = self.prior_ctrl.dynamics_func # nonlinear prior + self.prior_dynamics_func = self.prior_ctrl.dynamics_func # nonlinear prior self.X_EQ = self.prior_ctrl.X_EQ self.U_EQ = self.prior_ctrl.U_EQ # GP and training 
parameters. @@ -213,7 +214,7 @@ def __init__( self.x_prev = None self.u_prev = None # exit() - + def set_lin_gp_dynamics_func(self): '''Updates symbolic dynamics with actual control frequency.''' # Original version, used in shooting. @@ -222,28 +223,28 @@ def set_lin_gp_dynamics_func(self): x_guess = cs.MX.sym('x_guess', self.model.nx, 1) u_guess = cs.MX.sym('u_guess', self.model.nu, 1) dfdxdfdu = self.model.df_func(x=x_guess, u=u_guess) - dfdx = dfdxdfdu['dfdx']#.toarray() - dfdu = dfdxdfdu['dfdu']#.toarray() - z = cs.MX.sym('z', self.model.nx + self.model.nu, 1) # query point (the linearization point) + dfdx = dfdxdfdu['dfdx'] # .toarray() + dfdu = dfdxdfdu['dfdu'] # .toarray() + z = cs.MX.sym('z', self.model.nx + self.model.nu, 1) # query point (the linearization point) Ad = cs.DM_eye(self.model.nx) + dfdx * self.dt Bd = dfdu * self.dt A_gp = self.gaussian_process.casadi_linearized_predict(z=z)['A'] B_gp = self.gaussian_process.casadi_linearized_predict(z=z)['B'] assert A_gp.shape == (self.model.nx, self.model.nx) assert B_gp.shape == (self.model.nx, self.model.nu) - A = Ad + A_gp # TODO: check why Bd is used here correctly + A = Ad + A_gp # TODO: check why Bd is used here correctly B = Bd + B_gp x_dot_lin = A @ delta_x + B @ delta_u - self.linear_gp_dynamics_func = cs.Function('linear_dynamics_func', - [delta_x, delta_u, x_guess, u_guess, z], - [x_dot_lin, A, B], - ['x0', 'p', 'x_guess', 'u_guess', 'z'], - ['xf', 'A', 'B']) + self.linear_gp_dynamics_func = cs.Function('linear_dynamics_func', + [delta_x, delta_u, x_guess, u_guess, z], + [x_dot_lin, A, B], + ['x0', 'p', 'x_guess', 'u_guess', 'z'], + ['xf', 'A', 'B']) self.dfdx = A self.dfdu = B - + def setup_sqp_gp_optimizer(self): - print(f'Setting up SQP GP MPC optimizer.') + print(f'Setting up SQP GP MPC optimizer.') before_optimizer_setup = time.time() nx, nu = self.model.nx, self.model.nu T = self.T @@ -279,12 +280,10 @@ def setup_sqp_gp_optimizer(self): # for input_constraint in self.constraints.input_constraints: # input_constraint_set.append(opti.parameter(input_constraint.num_constraints, T)) - - # Sparse GP mean postfactor matrix. (not used here!) # TODO: check if this is needed mean_post_factor = opti.parameter(len(self.target_mask), self.train_data['train_targets'].shape[0]) - + # cost (cumulative) cost = 0 cost_func = self.model.loss @@ -309,14 +308,14 @@ def setup_sqp_gp_optimizer(self): # Constraints for i in range(self.T): # Dynamics constraints using the dynamics of the prior and the mean of the GP. 
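The discrete-time matrices built above (Ad = I + dfdx*dt, Bd = dfdu*dt, then an additive GP correction) follow a forward-Euler linearization; a small numpy sketch of that construction is shown here with hypothetical names, after which the hunk continues with the resulting dynamics constraint.

import numpy as np

def euler_discretize(dfdx, dfdu, dt, A_gp=None, B_gp=None):
    # Forward-Euler discretization of continuous-time Jacobians,
    # optionally corrected by a learned (GP) linearization term.
    nx = dfdx.shape[0]
    Ad = np.eye(nx) + dfdx * dt
    Bd = dfdu * dt
    if A_gp is not None:
        Ad = Ad + A_gp
    if B_gp is not None:
        Bd = Bd + B_gp
    return Ad, Bd

# Example: double integrator with dt = 0.02 and no GP correction.
dfdx = np.array([[0.0, 1.0], [0.0, 0.0]])
dfdu = np.array([[0.0], [1.0]])
Ad, Bd = euler_discretize(dfdx, dfdu, dt=0.02)
print(Ad, Bd, sep='\n')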
- next_state = self.linear_gp_dynamics_func(x0=x_var[:, i], p=u_var[:, i], \ - x_guess=x_guess[:,i], u_guess=u_guess[:,i], \ - z=z[:, i])['xf'] + next_state = self.linear_gp_dynamics_func(x0=x_var[:, i], p=u_var[:, i], + x_guess=x_guess[:, i], u_guess=u_guess[:, i], + z=z[:, i])['xf'] opti.subject_to(x_var[:, i + 1] == next_state) # TODO: probablistic constraints tightening for sc_i, state_constraint in enumerate(self.state_constraints_sym): opti.subject_to(state_constraint(x_var[:, i] + x_guess[:, i]) <= -self.constraint_tol) - + for ic_i, input_constraint in enumerate(self.input_constraints_sym): opti.subject_to(input_constraint(u_var[:, i] + u_guess[:, i]) <= -self.constraint_tol) @@ -326,7 +325,7 @@ def setup_sqp_gp_optimizer(self): # initial condiiton constraints opti.subject_to(x_var[:, 0] + x_guess[:, 0] == x_init) opti.minimize(cost) - # create solver + # create solver opts = {'expand': True} opti.solver(self.qp_solver, opts) self.opti_dict = { @@ -354,7 +353,7 @@ def select_action(self, obs, info=None): self.u_guess = u_val + self.u_guess self.x_guess = x_val + self.x_guess if np.linalg.norm(u_val - self.u_prev) < self.action_convergence_tol\ - and np.linalg.norm(x_val - self.x_prev) < self.action_convergence_tol: + and np.linalg.norm(x_val - self.x_prev) < self.action_convergence_tol: break self.u_prev, self.x_prev = u_val, x_val print(f'Number of SQP iterations: {i}') @@ -369,7 +368,7 @@ def select_action(self, obs, info=None): self.last_obs = obs self.last_action = action return action - + def select_action_with_sqp_gp(self, obs): if self.x_guess is None or self.u_guess is None: self.compute_initial_guess(obs, self.get_references()) @@ -386,7 +385,7 @@ def select_action_with_sqp_gp(self, obs): # Assign the initial state. opti.set_value(x_init, obs) - # Assign reference trajectory within horizon. + # Assign reference trajectory within horizon. goal_states = self.get_references() opti.set_value(x_ref, goal_states) opti.set_value(x_guess, self.x_guess) @@ -445,7 +444,7 @@ def reset(self): self.traj = self.env.X_GOAL.T self.traj_step = 0 # Dynamics model. - + if self.gaussian_process is not None: self.set_lin_gp_dynamics_func() self.setup_sqp_gp_optimizer() @@ -475,7 +474,7 @@ def preprocess_training_data(self, np.array: inputs for GP training, (N, nx+nu). np.array: targets for GP training, (N, nx). ''' - print("=========== Preprocessing training data for SQP ===========") + print('=========== Preprocessing training data for SQP ===========') # Get the predicted dynamics. This is a linear prior, thus we need to account for the fact that # it is linearized about an eq using self.X_GOAL and self.U_GOAL. 
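The hunk continues with x_pred_seq below; schematically, the GP targets are residuals between the measured next states and the prior model's one-step prediction taken about the equilibrium. A heavily simplified sketch, assuming plain residual targets and a linear prior (the exact scaling in preprocess_training_data may differ; all names here are hypothetical):

import numpy as np

def gp_residual_targets(x_seq, u_seq, x_next_seq, Ad, Bd, x_eq, u_eq):
    # One-step prediction of the linear prior, linearized about (x_eq, u_eq):
    #   x_pred = x_eq + Ad (x - x_eq) + Bd (u - u_eq)
    x_pred = x_eq + (x_seq - x_eq) @ Ad.T + (u_seq - u_eq) @ Bd.T
    # GP targets are the unmodeled residuals of the prior model.
    return x_next_seq - x_pred

# Example shapes: N samples, nx = 2, nu = 1.
N, nx, nu = 5, 2, 1
rng = np.random.default_rng(0)
targets = gp_residual_targets(rng.normal(size=(N, nx)), rng.normal(size=(N, nu)),
                              rng.normal(size=(N, nx)),
                              Ad=np.eye(nx), Bd=np.zeros((nx, nu)),
                              x_eq=np.zeros(nx), u_eq=np.zeros(nu))
print(targets.shape)  # (5, 2)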
x_pred_seq = self.prior_dynamics_func(x0=x_seq.T, diff --git a/safe_control_gym/controllers/mpc/sqp_mpc.py b/safe_control_gym/controllers/mpc/sqp_mpc.py index b21621a0b..54e6ee807 100644 --- a/safe_control_gym/controllers/mpc/sqp_mpc.py +++ b/safe_control_gym/controllers/mpc/sqp_mpc.py @@ -6,15 +6,17 @@ import casadi as cs import numpy as np +from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system from safe_control_gym.controllers.mpc.mpc import MPC from safe_control_gym.controllers.mpc.mpc_utils import (compute_discrete_lqr_gain_from_cont_linear_system, compute_state_rmse, get_cost_weight_matrix, reset_constraints, rk_discrete) -from safe_control_gym.controllers.lqr.lqr_utils import discretize_linear_system from safe_control_gym.envs.benchmark_env import Task from safe_control_gym.envs.constraints import GENERAL_CONSTRAINTS, create_constraint_list + # from safe_control_gym.controllers.mpc.sqp_mpc_utils import get_cost + class SQPMPC(MPC): '''Model Predictive Control using Sequential Quadratic Programming (SQP).''' @@ -80,11 +82,11 @@ def __init__( self.env) self.additional_constraints = additional_constraintsList.constraints self.constraints, self.state_constraints_sym, self.input_constraints_sym \ - = reset_constraints(self.env.constraints.constraints - + self.additional_constraints) + = reset_constraints(self.env.constraints.constraints + + self.additional_constraints) else: self.constraints, self.state_constraints_sym, self.input_constraints_sym \ - = reset_constraints(self.env.constraints.constraints) + = reset_constraints(self.env.constraints.constraints) self.additional_constraints = [] # Model parameters self.model = self.get_prior(self.env) @@ -100,7 +102,7 @@ def __init__( # self.X_EQ = self.env.X_GOAL # self.U_EQ = self.env.U_GOAL - self.init_step_solver = 'ipopt' # for nonlinear warmstart + self.init_step_solver = 'ipopt' # for nonlinear warmstart self.qp_solver = 'qrqp' self.max_qp_iter = 50 self.action_convergence_tol = 1e-3 @@ -115,8 +117,8 @@ def set_lin_dynamics_func(self, exact=True): x_guess = cs.MX.sym('x_guess', self.model.nx, 1) u_guess = cs.MX.sym('u_guess', self.model.nu, 1) dfdxdfdu = self.model.df_func(x=x_guess, u=u_guess) - dfdx = dfdxdfdu['dfdx']#.toarray() - dfdu = dfdxdfdu['dfdu']#.toarray() + dfdx = dfdxdfdu['dfdx'] # .toarray() + dfdu = dfdxdfdu['dfdu'] # .toarray() # if exact: # NOTE: exact is not implemented because cs.expm is not supported # # M = cs.SX.zeros(self.model.nx + self.model.nu, self.model.nx + self.model.nu) # # M[:self.model.nx, :self.model.nx] = dfdx @@ -126,7 +128,7 @@ def set_lin_dynamics_func(self, exact=True): # Md = cs.expm(M * self.dt) # Ad = Md[:self.model.nx, :self.model.nx] # Bd = Md[:self.model.nx, self.model.nx:] - # else: + # else: Ad = cs.DM_eye(self.model.nx) + dfdx * self.dt Bd = dfdu * self.dt @@ -136,7 +138,6 @@ def set_lin_dynamics_func(self, exact=True): [x_dot_lin, Ad, Bd], ['x0', 'p', 'x_guess', 'u_guess'], ['xf', 'Ad', 'Bd']) - def reset(self): '''Prepares for training or evaluation.''' @@ -156,7 +157,7 @@ def reset(self): # Previously solved states & inputs, useful for warm start. 
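Stored previous solutions of this kind are typically reused by shifting them forward one step at the next call, repeating the last entry as the new tail guess. A minimal sketch with hypothetical arrays (not the controller's exact update):

import numpy as np

def shift_warm_start(x_prev, u_prev):
    # Drop the first step and repeat the last entry so the guesses
    # keep their (nx, T+1) and (nu, T) shapes for the next solve.
    x_guess = np.concatenate([x_prev[:, 1:], x_prev[:, -1:]], axis=1)
    u_guess = np.concatenate([u_prev[:, 1:], u_prev[:, -1:]], axis=1)
    return x_guess, u_guess

x_prev = np.arange(8.0).reshape(2, 4)   # nx = 2, T = 3
u_prev = np.arange(3.0).reshape(1, 3)   # nu = 1
x_guess, u_guess = shift_warm_start(x_prev, u_prev)
print(x_guess[0], u_guess[0])  # [1. 2. 3. 3.] [1. 2. 2.]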
# nominal solution - self.x_prev = None + self.x_prev = None self.u_prev = None # # previous delta solution self.x_guess = None @@ -167,7 +168,7 @@ def reset(self): # self.setup_optimizer() # self.setup_sqp_optimizer() self.setup_results_dict() - + def compute_initial_guess(self, init_state, goal_states): print('=============Computing initial guess=============') time_before = time.time() @@ -175,12 +176,12 @@ def compute_initial_guess(self, init_state, goal_states): self.setup_optimizer(solver=self.init_step_solver) opti_dict = self.opti_dict opti = opti_dict['opti'] - x_var = opti_dict['x_var'] # optimization variables - u_var = opti_dict['u_var'] # optimization variables - x_init = opti_dict['x_init'] # initial state - x_ref = opti_dict['x_ref'] # reference state/trajectory + x_var = opti_dict['x_var'] # optimization variables + u_var = opti_dict['u_var'] # optimization variables + x_init = opti_dict['x_init'] # initial state + x_ref = opti_dict['x_ref'] # reference state/trajectory # Assign the initial state. - opti.set_value(x_init, init_state) # initial state should have dim (nx,) + opti.set_value(x_init, init_state) # initial state should have dim (nx,) # Assign reference trajectory within horizon. goal_states = self.get_references() opti.set_value(x_ref, goal_states) @@ -249,8 +250,8 @@ def setup_sqp_optimizer(self): R=self.R)['l'] for i in range(self.T): # Dynamics constraints. - next_state = self.linear_dynamics_func(x0=x_var[:, i], p=u_var[:, i], - x_guess=x_guess[:,i], u_guess=u_guess[:,i])['xf'] + next_state = self.linear_dynamics_func(x0=x_var[:, i], p=u_var[:, i], + x_guess=x_guess[:, i], u_guess=u_guess[:, i])['xf'] opti.subject_to(x_var[:, i + 1] == next_state) # State and input constraints soft_con_coeff = 10 @@ -282,7 +283,7 @@ def setup_sqp_optimizer(self): # initial condition constraints opti.subject_to(x_var[:, 0] + x_guess[:, 0] == x_init) opti.minimize(cost) - # create solver + # create solver opts = {'expand': True} # if platform == 'linux': # opts.update({'print_time': 1, 'print_header': 0}) @@ -306,10 +307,10 @@ def setup_sqp_optimizer(self): } after_optimizer_setup = time.time() print('MPC setup_sqp_optimizer time: ', after_optimizer_setup - before_optimizer_setup) - + def select_action(self, obs, info=None): before_select_action = time.time() - # use nonlinear solver to get an initial guess at initial step + # use nonlinear solver to get an initial guess at initial step if self.x_guess is None or self.u_guess is None: self.compute_initial_guess(obs, self.get_references()) @@ -343,9 +344,9 @@ def select_action(self, obs, info=None): return action def select_qp_action(self, - obs, - info=None - ): + obs, + info=None + ): '''Solve nonlinear mpc problem to get next action. 
Args: @@ -422,6 +423,3 @@ def select_qp_action(self, # action += self.u_guess[0] # self.prev_action = action # return action - - - \ No newline at end of file diff --git a/safe_control_gym/controllers/mpc/sqp_mpc_utils.py b/safe_control_gym/controllers/mpc/sqp_mpc_utils.py index 48e6c3224..ec1c42ea9 100644 --- a/safe_control_gym/controllers/mpc/sqp_mpc_utils.py +++ b/safe_control_gym/controllers/mpc/sqp_mpc_utils.py @@ -17,7 +17,7 @@ def get_cost(r, Q, n_lookahead): S (np.array): The cost matrix Q (np.array): The state cost matrix R (np.array): The actuation cost matrix - + ''' I_r = np.eye(r.shape[0]) @@ -33,6 +33,5 @@ def get_cost(r, Q, n_lookahead): assert S.shape[0] == nx * (n_lookahead + 1) + nu * n_lookahead assert cost_action.shape[0] == nu * n_lookahead assert cost_state.shape[0] == nx * (n_lookahead + 1) - - return S, cost_state, cost_action + return S, cost_state, cost_action diff --git a/safe_control_gym/controllers/pid/pid.py b/safe_control_gym/controllers/pid/pid.py index 7f32f9287..1c928d20c 100644 --- a/safe_control_gym/controllers/pid/pid.py +++ b/safe_control_gym/controllers/pid/pid.py @@ -147,8 +147,8 @@ def select_action(self, obs, info=None): action = self.KF * action**2 if self.env.QUAD_TYPE == 2: action = np.array([action[0] + action[3], action[1] + action[2]]) - elif self.env.QUAD_TYPE == 4: # 2D quadrotor with attitude control - action = np.array([self.env.attitude_control.pwm2thrust(thrust/3)*4, computed_target_rpy[1]]) + elif self.env.QUAD_TYPE == 4: # 2D quadrotor with attitude control + action = np.array([self.env.attitude_control.pwm2thrust(thrust / 3) * 4, computed_target_rpy[1]]) return action diff --git a/safe_control_gym/controllers/ppo/ppo.py b/safe_control_gym/controllers/ppo/ppo.py index 0c82b13fd..98578cbbe 100644 --- a/safe_control_gym/controllers/ppo/ppo.py +++ b/safe_control_gym/controllers/ppo/ppo.py @@ -332,6 +332,7 @@ def log_step(self, { 'ep_length': ep_lengths.mean(), 'ep_return': ep_returns.mean(), + 'ep_return_std': ep_returns.std(), 'ep_reward': (ep_returns / ep_lengths).mean(), 'ep_constraint_violation': ep_constraint_violation.mean() }, diff --git a/safe_control_gym/controllers/sac/sac.py b/safe_control_gym/controllers/sac/sac.py index 3496fc1d7..fb9b78cc8 100644 --- a/safe_control_gym/controllers/sac/sac.py +++ b/safe_control_gym/controllers/sac/sac.py @@ -368,6 +368,7 @@ def log_step(self, results): { 'ep_length': ep_lengths.mean(), 'ep_return': ep_returns.mean(), + 'ep_return_std': ep_returns.std(), 'ep_reward': (ep_returns / ep_lengths).mean(), 'ep_constraint_violation': ep_constraint_violation.mean() }, @@ -387,6 +388,7 @@ def log_step(self, results): { 'ep_length': eval_ep_lengths.mean(), 'ep_return': eval_ep_returns.mean(), + 'ep_return_std': eval_ep_returns.std(), 'ep_reward': (eval_ep_returns / eval_ep_lengths).mean(), 'constraint_violation': eval_constraint_violation.mean(), 'mse': eval_mse.mean() diff --git a/safe_control_gym/controllers/sac/sac_utils.py b/safe_control_gym/controllers/sac/sac_utils.py index 3e2e73e33..d4bbdfb44 100644 --- a/safe_control_gym/controllers/sac/sac_utils.py +++ b/safe_control_gym/controllers/sac/sac_utils.py @@ -149,7 +149,7 @@ def update(self, batch): # actor update policy_loss, entropy_loss = self.compute_policy_loss(batch) - if self.count%self.update_freq == 0: + if self.count % self.update_freq == 0: self.actor_opt.zero_grad() policy_loss.backward() self.actor_opt.step() @@ -166,7 +166,7 @@ def update(self, batch): self.critic_opt.step() # update target networks - if self.count%self.update_freq == 
0: + if self.count % self.update_freq == 0: soft_update(self.ac, self.ac_targ, self.tau) self.count += 1 @@ -197,10 +197,10 @@ def __init__(self, obs_dim, act_dim, action_space, hidden_dims, activation, post # action rescaling (from cleanrl) self.register_buffer( - "action_scale", torch.tensor((action_space.high - action_space.low) / 2.0, dtype=torch.float32).flatten() + 'action_scale', torch.tensor((action_space.high - action_space.low) / 2.0, dtype=torch.float32).flatten() ) self.register_buffer( - "action_bias", torch.tensor((action_space.high + action_space.low) / 2.0, dtype=torch.float32).flatten() + 'action_bias', torch.tensor((action_space.high + action_space.low) / 2.0, dtype=torch.float32).flatten() ) def forward(self, obs, deterministic=False, with_logprob=True): @@ -237,7 +237,7 @@ def forward(self, obs, deterministic=False, with_logprob=True): logp = logp.sum(1, keepdim=True) else: logp = None - + return action, logp diff --git a/safe_control_gym/controllers/td3/td3.yaml b/safe_control_gym/controllers/td3/td3.yaml index 6cecd6667..023a28e66 100644 --- a/safe_control_gym/controllers/td3/td3.yaml +++ b/safe_control_gym/controllers/td3/td3.yaml @@ -9,16 +9,12 @@ clip_reward: 10. # loss args gamma: 0.99 tau: 0.005 -init_temperature: 0.2 -use_entropy_tuning: False -target_entropy: null # optim args train_interval: 100 train_batch_size: 64 actor_lr: 0.001 critic_lr: 0.001 -entropy_lr: 0.001 # runner args max_env_steps: 1000000 diff --git a/safe_control_gym/envs/__init__.py b/safe_control_gym/envs/__init__.py index 673e01a74..aca3777ce 100644 --- a/safe_control_gym/envs/__init__.py +++ b/safe_control_gym/envs/__init__.py @@ -1,4 +1,4 @@ -'''Register environments.''' +"""Register environments.""" from safe_control_gym.utils.registration import register @@ -16,4 +16,4 @@ register(idx='shower', entry_point='safe_control_gym.envs.test_shower.shower:ShowerEnv', - config_entry_point='safe_control_gym.envs.test_shower:shower.yaml') \ No newline at end of file + config_entry_point='safe_control_gym.envs.test_shower:shower.yaml') diff --git a/safe_control_gym/envs/benchmark_env.py b/safe_control_gym/envs/benchmark_env.py index 00b069368..33bda0faa 100644 --- a/safe_control_gym/envs/benchmark_env.py +++ b/safe_control_gym/envs/benchmark_env.py @@ -478,10 +478,10 @@ def after_step(self, obs, rew, done, info): info (dict): The info after this step. Returns: - obs (ndarray): The udpdated observation after this step. - rew (float): The udpdated reward after this step. + obs (ndarray): The updated observation after this step. + rew (float): The updated reward after this step. done (bool): Whether the evaluation is done. - info (dict): The udpdated info after this step. + info (dict): The updated info after this step. """ # Increment counters self.pyb_step_counter += self.PYB_STEPS_PER_CTRL diff --git a/safe_control_gym/envs/constraints.py b/safe_control_gym/envs/constraints.py index 89a40b06b..1a8d82b5e 100644 --- a/safe_control_gym/envs/constraints.py +++ b/safe_control_gym/envs/constraints.py @@ -420,9 +420,9 @@ def __init__(self, constrained_variable (ConstrainedVariableType): Specifies the input type to the constraint as a constraint that acts on the state, input, or both. bound (list, np.array): 1D array or list of the bounds. Length must match - the environemt observation space dimension. If none, the env defaults are used + the environment observation space dimension. 
If none, the env defaults are used strict (optional, bool): Whether the constraint is violated also when equal to its threshold. - active_dims (list of ints): Filters the constraint to only act on select certian dimensions. + active_dims (list of ints): Filters the constraint to only act on select certain dimensions. tolerance (list or np.array): The distance from the constraint at which is_almost_active returns True. decimals (optional, int): Specifies the number of decimal places to round the constraint evaluation too. ''' @@ -649,7 +649,7 @@ def create_constraint_list(constraint_specs, available_constraints, env): '''Creates a ConstraintList from yaml constraint specification. Args: - constraint_specs (list): List of dicts defining the constraints info. + constraint_specs (list): List of dicts defining the constraints' info. available_constraints (dict): Dict of the constraints that are available env (BenchmarkEnv): The environment for which the constraints will be applied ''' diff --git a/safe_control_gym/envs/gym_control/cartpole.py b/safe_control_gym/envs/gym_control/cartpole.py index c89f1eaef..86ebe8c95 100644 --- a/safe_control_gym/envs/gym_control/cartpole.py +++ b/safe_control_gym/envs/gym_control/cartpole.py @@ -420,7 +420,7 @@ def _setup_symbolic(self, prior_prop={}, **kwargs): Ur = cs.MX.sym('Ur', nu, 1) cost_func = 0.5 * (X - Xr).T @ Q @ (X - Xr) + 0.5 * (U - Ur).T @ R @ (U - Ur) # Define dynamics and cost dictionaries. - dynamics = {'dyn_eqn': X_dot, 'obs_eqn': Y, 'vars': {'X': X, 'U': U},} + dynamics = {'dyn_eqn': X_dot, 'obs_eqn': Y, 'vars': {'X': X, 'U': U}, } cost = {'cost_func': cost_func, 'vars': {'X': X, 'U': U, 'Xr': Xr, 'Ur': Ur, 'Q': Q, 'R': R}} # Additional params to cache params = { @@ -454,10 +454,10 @@ def _set_observation_space(self): self.x_dot_threshold = 10 self.theta_dot_threshold = 10 # Limit set to 2x: i.e. a failing observation is still within bounds. - obs_bound = np.array([self.x_threshold * 2, - self.x_dot_threshold, #np.finfo(np.float32).max, - self.theta_threshold_radians * 2, - self.theta_dot_threshold]) # np.finfo(np.float32).max + obs_bound = np.array([self.x_threshold * 2, + self.x_dot_threshold, # np.finfo(np.float32).max, + self.theta_threshold_radians * 2, + self.theta_dot_threshold]) # np.finfo(np.float32).max self.state_space = spaces.Box(low=-obs_bound, high=obs_bound, dtype=np.float32) # Concatenate goal info for RL diff --git a/safe_control_gym/envs/gym_pendulum/pendulum.py b/safe_control_gym/envs/gym_pendulum/pendulum.py index 91036b3dd..9ae809fad 100644 --- a/safe_control_gym/envs/gym_pendulum/pendulum.py +++ b/safe_control_gym/envs/gym_pendulum/pendulum.py @@ -292,7 +292,7 @@ def reset(self, seed=None, init_state=None): self.OVERRIDDEN_POLE_MASS = prop_values['pole_mass'] # See `slender rod`, https://en.wikipedia.org/wiki/List_of_moments_of_inertia. # OVERRIDDEN_POLE_INERTIA = (1 / 12) * self.OVERRIDDEN_POLE_MASS * (2 * self.OVERRIDDEN_EFFECTIVE_POLE_LENGTH)**2 - OVERRIDDEN_POLE_INERTIA = (1 / 3) * self.OVERRIDDEN_POLE_MASS * self.OVERRIDDEN_EFFECTIVE_POLE_LENGTH**2 # pole mass at the end of the rod + OVERRIDDEN_POLE_INERTIA = (1 / 3) * self.OVERRIDDEN_POLE_MASS * self.OVERRIDDEN_EFFECTIVE_POLE_LENGTH**2 # pole mass at the end of the rod # Load the cartpole with new urdf. 
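For reference on the inertia expression above: a uniform slender rod of mass m and full length l pivoted at one end has I = (1/3) m l^2, whereas a point mass concentrated at the rod tip gives I = m l^2; the active line uses the slender-rod value with the full pole length. The numbers below are illustrative only.

m, l = 0.1, 0.5                       # illustrative pole mass [kg] and full length [m]
I_rod_about_end = (1 / 3) * m * l**2  # value computed by the line above
I_point_mass_at_tip = m * l**2        # alternative suggested by the inline comment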
override_urdf_tree = self._create_urdf(self.URDF_PATH, length=self.OVERRIDDEN_EFFECTIVE_POLE_LENGTH, inertia=OVERRIDDEN_POLE_INERTIA) # self.override_path = os.path.join(self.output_dir, f'pid-{os.getpid()}_id-{self.idx}_cartpole.urdf') @@ -306,7 +306,7 @@ def reset(self, seed=None, init_state=None): # Remove cache file after loading it into PyBullet. os.remove(self.override_path) # Pendulum settings. - # for link_idx in [-1, 0, 1]: # Slider, cart, and pole. # why is slider -1? + # for link_idx in [-1, 0, 1]: # Slider, cart, and pole. # why is slider -1? for link_idx in [-1, 0]: # fixed cart and pole. p.changeDynamics(self.PENDULUM_ID, linkIndex=link_idx, linearDamping=0, angularDamping=0, physicsClientId=self.PYB_CLIENT) # for joint_idx in [0, 1]: # Slider-to-cart and cart-to-pole joints. @@ -438,7 +438,7 @@ def _setup_symbolic(self, prior_prop={}, **kwargs): Xr = cs.MX.sym('Xr', nx, 1) Ur = cs.MX.sym('Ur', nu, 1) cost_func = 0.5 * (self.wrap_sym(X) - Xr).T @ Q @ (self.wrap_sym(X) - Xr) \ - + 0.5 * (U - Ur).T @ R @ (U - Ur) + + 0.5 * (U - Ur).T @ R @ (U - Ur) # cost_func = self.cost_func(X, U, Xr, Ur, Q, R) # Define dynamics and cost dictionaries. # dynamics = {'dyn_eqn': X_dot, 'obs_eqn': Y, 'vars': {'X': X, 'U': U}} @@ -455,7 +455,7 @@ def _setup_symbolic(self, prior_prop={}, **kwargs): } # Setup symbolic model. self.symbolic = SymbolicModel(dynamics=dynamics, cost=cost, dt=dt, params=params) - + def wrap_sym(self, X): '''Wrap angle to [-pi, pi] when used in observation. @@ -680,7 +680,7 @@ def _get_reward(self): if self.TASK == Task.STABILIZATION: return float( -1 * self.symbolic.loss(x=self.state, - # -1 * self.symbolic.loss(x=state, + # -1 * self.symbolic.loss(x=state, Xr=self.X_GOAL, u=self.current_clipped_action, Ur=self.U_GOAL, @@ -714,7 +714,7 @@ def _get_done(self): # x, _, theta, _ = self.state theta, _ = self.state # if x < -self.x_threshold or x > self.x_threshold or theta < -self.theta_threshold_radians or theta > self.theta_threshold_radians: - if theta < -self.theta_threshold_radians or theta > self.theta_threshold_radians: + if theta < -self.theta_threshold_radians or theta > self.theta_threshold_radians: self.out_of_bounds = True return True self.out_of_bounds = False @@ -770,14 +770,14 @@ def _parse_urdf_parameters(self, file_name): ''' URDF_TREE = (etxml.parse(file_name)).getroot() # EFFECTIVE_POLE_LENGTH = 0.5 * float(URDF_TREE[3][0][0][0].attrib['size'].split(' ')[-1]) # Note: HALF length of pole. - # POLE_MASS = float(URDF_TREE[3][1][1].attrib['value']) - # CART_MASS = float(URDF_TREE[1][2][0].attrib['value']) - # return EFFECTIVE_POLE_LENGTH, POLE_MASS, CART_MASS - EFFECTIVE_POLE_LENGTH = float(URDF_TREE[1][0][0][0].attrib['size'].split(' ')[-1]) # Note: full length of pole. + # POLE_MASS = float(URDF_TREE[3][1][1].attrib['value']) + # CART_MASS = float(URDF_TREE[1][2][0].attrib['value']) + # return EFFECTIVE_POLE_LENGTH, POLE_MASS, CART_MASS + EFFECTIVE_POLE_LENGTH = float(URDF_TREE[1][0][0][0].attrib['size'].split(' ')[-1]) # Note: full length of pole. 
POLE_MASS = float(URDF_TREE[1][1][1].attrib['value']) - # print('pole length:', EFFECTIVE_POLE_LENGTH) - # print('pole mass:', POLE_MASS) - # print(URDF_TREE[3][0][0][0].attrib['size'].split(' ')[-1]) + # print('pole length:', EFFECTIVE_POLE_LENGTH) + # print('pole mass:', POLE_MASS) + # print(URDF_TREE[3][0][0][0].attrib['size'].split(' ')[-1]) return EFFECTIVE_POLE_LENGTH, POLE_MASS def _create_urdf(self, file_name, length=None, inertia=None): diff --git a/safe_control_gym/envs/gym_pybullet_drones/quadrotor.py b/safe_control_gym/envs/gym_pybullet_drones/quadrotor.py index dc1494b0a..a6328d783 100644 --- a/safe_control_gym/envs/gym_pybullet_drones/quadrotor.py +++ b/safe_control_gym/envs/gym_pybullet_drones/quadrotor.py @@ -15,7 +15,8 @@ from safe_control_gym.envs.benchmark_env import Cost, Task from safe_control_gym.envs.constraints import GENERAL_CONSTRAINTS from safe_control_gym.envs.gym_pybullet_drones.base_aviary import BaseAviary -from safe_control_gym.envs.gym_pybullet_drones.quadrotor_utils import QuadType, AttitudeControl, cmd2pwm, pwm2rpm +from safe_control_gym.envs.gym_pybullet_drones.quadrotor_utils import (AttitudeControl, QuadType, cmd2pwm, + pwm2rpm) from safe_control_gym.math_and_models.symbolic_systems import SymbolicModel from safe_control_gym.math_and_models.transformations import csRotXYZ, transform_trajectory @@ -166,11 +167,11 @@ def __init__(self, inertial_prop (ndarray, optional): The inertial properties of the environment (M, Ixx, Iyy, Izz). quad_type (QuadType, optional): The choice of motion type (1D along z, 2D in the x-z plane, or 3D). norm_act_scale (float): Scaling the [-1,1] action space around hover thrust when `normalized_action_space` is True. - obs_goal_horizon (int): How many future goal states to append to obervation. + obs_goal_horizon (int): How many future goal states to append to observation. rew_state_weight (list/ndarray): Quadratic weights for state in rl reward. rew_act_weight (list/ndarray): Quadratic weights for action in rl reward. - rew_exponential (bool): If to exponentiate negative quadratic cost to positive, bounded [0,1] reward. - done_on_out_of_bound (bool): If to termiante when state is out of bound. + rew_exponential (bool): If to exponential negative quadratic cost to positive, bounded [0,1] reward. + done_on_out_of_bound (bool): If to terminate when state is out of bound. info_mse_metric_state_weight (list/ndarray): Quadratic weights for state in mse calculation for info dict. """ @@ -505,12 +506,12 @@ def _setup_symbolic(self, prior_prop={}, **kwargs): prior_prop (dict): specify the prior inertial prop to use in the symbolic model. """ # if self.QUAD_TYPE is QuadType.TWO_D_ATTITUDE: - # params_pitch_rate = prior_prop.get('params_pitch_rate', + # params_pitch_rate = prior_prop.get('params_pitch_rate', # params_acc = # else: m = prior_prop.get('M', self.MASS) Iyy = prior_prop.get('Iyy', self.J[1, 1]) - + g, length = self.GRAVITY_ACC, self.L dt = self.CTRL_TIMESTEP # Define states. @@ -558,8 +559,8 @@ def _setup_symbolic(self, prior_prop={}, **kwargs): theta_dot = cs.MX.sym('theta_dot') X = cs.vertcat(x, x_dot, z, z_dot, theta, theta_dot) # Define input collective thrust and theta. - T = cs.MX.sym('T_c') # normlized thrust [N] - P = cs.MX.sym('P_c') # desired pitch angle [rad] + T = cs.MX.sym('T_c') # normlized thrust [N] + P = cs.MX.sym('P_c') # desired pitch angle [rad] U = cs.vertcat(T, P) # The thrust in PWM is converted from the normalized thrust. 
# With the formulat F_desired = b_F * T + a_F @@ -693,13 +694,13 @@ def _set_action_space(self): a_low = self.KF * n_mot * (self.PWM2RPM_SCALE * self.MIN_PWM + self.PWM2RPM_CONST)**2 a_high = self.KF * n_mot * (self.PWM2RPM_SCALE * self.MAX_PWM + self.PWM2RPM_CONST)**2 self.physical_action_bounds = (np.array([np.full(1, a_low, np.float32), np.full(1, -max_pitch_rad, np.float32)]).flatten(), - np.array([np.full(1, a_high, np.float32), np.full(1, max_pitch_rad, np.float32)]).flatten()) + np.array([np.full(1, a_high, np.float32), np.full(1, max_pitch_rad, np.float32)]).flatten()) else: n_mot = 4 / action_dim a_low = self.KF * n_mot * (self.PWM2RPM_SCALE * self.MIN_PWM + self.PWM2RPM_CONST)**2 a_high = self.KF * n_mot * (self.PWM2RPM_SCALE * self.MAX_PWM + self.PWM2RPM_CONST)**2 self.physical_action_bounds = (np.full(action_dim, a_low, np.float32), - np.full(action_dim, a_high, np.float32)) + np.full(action_dim, a_high, np.float32)) if self.NORMALIZED_RL_ACTION_SPACE: # Normalized thrust (around hover thrust). @@ -708,13 +709,13 @@ def _set_action_space(self): else: self.hover_thrust = self.GRAVITY_ACC * self.MASS / action_dim self.action_space = spaces.Box(low=-np.ones(action_dim), - high=np.ones(action_dim), - dtype=np.float32) + high=np.ones(action_dim), + dtype=np.float32) else: # Direct thrust control. self.action_space = spaces.Box(low=self.physical_action_bounds[0], - high=self.physical_action_bounds[1], - dtype=np.float32) + high=self.physical_action_bounds[1], + dtype=np.float32) def _set_observation_space(self): """Sets the observation space of the environment.""" @@ -748,7 +749,7 @@ def _set_observation_space(self): high = np.array([ self.x_threshold, self.x_dot_threshold, self.z_threshold, self.z_dot_threshold, - self.theta_threshold_radians, self.theta_dot_threshold_radians + self.theta_threshold_radians, self.theta_dot_threshold_radians ]) self.STATE_LABELS = ['x', 'x_dot', 'z', 'z_dot', 'theta', 'theta_dot'] self.STATE_UNITS = ['m', 'm/s', 'm', 'm/s', 'rad', 'rad/s'] @@ -831,19 +832,19 @@ def _preprocess_control(self, action): if self.adversary_disturbance == 'action': self.current_physical_action = self.current_physical_action + self.adv_action self.current_noisy_physical_action = self.current_physical_action - + if self.QUAD_TYPE == QuadType.TWO_D_ATTITUDE: collective_thrust, pitch = action - # rpm = self.attitude_control._dslPIDAttitudeControl(indivisual_thrust, + # rpm = self.attitude_control._dslPIDAttitudeControl(individual_thrust, # self.quat[0], np.array([0, pitch, 0])) # input thrsut is pwm # thrust_action = self.KF * rpm**2 - # thrust_action = self.attitude_control._dslPIDAttitudeControl(self.attitude_control.pwm2thrust(thrust_c/3), + # thrust_action = self.attitude_control._dslPIDAttitudeControl(self.attitude_control.pwm2thrust(thrust_c/3), # self.quat[0], np.array([0, pitch, 0])) # input thrsut is in Newton # print(f"collective_thrust: {collective_thrust}, pitch: {pitch}") - thrust_action = self.attitude_control._dslPIDAttitudeControl(collective_thrust/4, - self.quat[0], np.array([0, pitch, 0])) # input thrsut is in Newton + thrust_action = self.attitude_control._dslPIDAttitudeControl(collective_thrust / 4, + self.quat[0], np.array([0, pitch, 0])) # input thrsut is in Newton thrust = np.array([thrust_action[0] + thrust_action[3], thrust_action[1] + thrust_action[2]]) - thrust = np.clip(thrust, np.full(2, self.physical_action_bounds[0][0]/2), np.full(2, self.physical_action_bounds[1][0]/2)) + thrust = np.clip(thrust, np.full(2, self.physical_action_bounds[0][0] / 2), 
np.full(2, self.physical_action_bounds[1][0] / 2)) pitch = np.clip(pitch, self.physical_action_bounds[0][1], self.physical_action_bounds[1][1]) self.current_clipped_action = np.array([sum(thrust), pitch]) else: @@ -853,7 +854,7 @@ def _preprocess_control(self, action): # convert to quad motor rpm commands pwm = cmd2pwm(thrust, self.PWM2RPM_SCALE, self.PWM2RPM_CONST, self.KF, self.MIN_PWM, self.MAX_PWM) rpm = pwm2rpm(pwm, self.PWM2RPM_SCALE, self.PWM2RPM_CONST) - + return rpm def normalize_action(self, action): @@ -890,7 +891,7 @@ def denormalize_action(self, action): # hover_pwm = (self.HOVER_RPM - self.PWM2RPM_CONST) / self.PWM2RPM_SCALE # thrust = np.where(thrust <= 0, self.MIN_PWM + (thrust + 1) * (hover_pwm - self.MIN_PWM), # hover_pwm + (self.MAX_PWM - hover_pwm) * thrust) - + thrust = (1 + self.norm_act_scale * action[0]) * self.hover_thrust # thrust = self.attitude_control.thrust2pwm(thrust) diff --git a/safe_control_gym/envs/gym_pybullet_drones/quadrotor_utils.py b/safe_control_gym/envs/gym_pybullet_drones/quadrotor_utils.py index e33346cc8..5669ac144 100644 --- a/safe_control_gym/envs/gym_pybullet_drones/quadrotor_utils.py +++ b/safe_control_gym/envs/gym_pybullet_drones/quadrotor_utils.py @@ -1,7 +1,7 @@ -'''Helper functions for the quadrotor environment.''' +"""Helper functions for the quadrotor environment.""" -from enum import IntEnum from abc import ABC +from enum import IntEnum import numpy as np import pybullet as p @@ -9,7 +9,7 @@ class QuadType(IntEnum): - '''Quadrotor types numeration class.''' + """Quadrotor types numeration class.""" ONE_D = 1 # One-dimensional (along z) movement. TWO_D = 2 # Two-dimensional (in the x-z plane) movement. @@ -18,7 +18,7 @@ class QuadType(IntEnum): def cmd2pwm(thrust, pwm2rpm_scale, pwm2rpm_const, ct, pwm_min, pwm_max): - '''Generic cmd to pwm function. + """Generic cmd to pwm function. For 1D, thrust is the total of all 4 motors; for 2D, 1st thrust is total of motor 1 & 4, 2nd thrust is total of motor 2 & 3; for 4D, thrust is thrust of each motor. @@ -33,7 +33,7 @@ def cmd2pwm(thrust, pwm2rpm_scale, pwm2rpm_const, ct, pwm_min, pwm_max): Returns: ndarray: array of length 4 containing PWM. - ''' + """ n_motor = 4 // int(thrust.size) thrust = np.clip(thrust, np.zeros_like(thrust), None) # Make sure thrust is not negative. motor_pwm = (np.sqrt(thrust / n_motor / ct) - pwm2rpm_const) / pwm2rpm_scale @@ -50,7 +50,7 @@ def cmd2pwm(thrust, pwm2rpm_scale, pwm2rpm_const, ct, pwm_min, pwm_max): def pwm2rpm(pwm, pwm2rpm_scale, pwm2rpm_const): - '''Computes motor squared rpm from pwm. + """Computes motor squared rpm from pwm. Args: pwm (ndarray): Array of length 4 containing PWM. @@ -59,13 +59,13 @@ def pwm2rpm(pwm, pwm2rpm_scale, pwm2rpm_const): Returns: ndarray: Array of length 4 containing RPMs. - ''' + """ rpm = pwm2rpm_scale * pwm + pwm2rpm_const return rpm class AttitudeControl(ABC): - '''AttitudeControl Class.''' + """AttitudeControl Class.""" def __init__(self, control_timestep, @@ -80,7 +80,7 @@ def __init__(self, min_pwm: float = 20000, max_pwm: float = 65535, ): - '''AttitudeControl class __init__ method. + """AttitudeControl class __init__ method. Args: control_timestep (float): The time step at which control is computed. @@ -94,8 +94,8 @@ def __init__(self, pwm2rpm_const (float, optional): PWM-to-RPM constant factor. min_pwm (float, optional): Minimum PWM. max_pwm (float, optional): Maximum PWM. 
- ''' - + """ + self.g = g self.KF = kf self.KM = km @@ -118,7 +118,7 @@ def __init__(self, self.control_timestep = control_timestep def reset(self): - '''Reinitialize just the controller before a new run.''' + """Reinitialize just the controller before a new run.""" # Clear PID control variables. self.last_rpy = np.zeros(3) @@ -130,47 +130,47 @@ def _dslPIDAttitudeControl(self, target_euler, target_rpy_rates=np.zeros(3) ): - """DSL's CF2.x PID attitude control. - - Parameters - ---------- - thrust : ndarray - (4,1)-shaped array of target thrust (Newton) along the drone z-axis. - cur_quat : ndarray - (4,1)-shaped array of floats containing the current orientation as a quaternion. - target_euler : ndarray - (3,1)-shaped array of floats containing the computed target Euler angles. - target_rpy_rates : ndarray - (3,1)-shaped array of floats containing the desired roll, pitch, and yaw rates. - - Returns - ------- - ndarray - (4,1)-shaped array of integers containing the RPMs to apply to each of the 4 motors. - - """ - cur_rotation = np.array(p.getMatrixFromQuaternion(cur_quat)).reshape(3, 3) - cur_rpy = np.array(p.getEulerFromQuaternion(cur_quat)) - target_quat = (Rotation.from_euler('XYZ', target_euler, degrees=False)).as_quat() - w, x, y, z = target_quat - target_rotation = (Rotation.from_quat([w, x, y, z])).as_matrix() - rot_matrix_e = np.dot((target_rotation.transpose()), cur_rotation) - np.dot(cur_rotation.transpose(), target_rotation) - rot_e = np.array([rot_matrix_e[2, 1], rot_matrix_e[0, 2], rot_matrix_e[1, 0]]) - rpy_rates_e = target_rpy_rates - (cur_rpy - self.last_rpy) / self.control_timestep - self.last_rpy = cur_rpy - self.integral_rpy_e = self.integral_rpy_e - rot_e * self.control_timestep - self.integral_rpy_e = np.clip(self.integral_rpy_e, -1500., 1500.) - self.integral_rpy_e[0:2] = np.clip(self.integral_rpy_e[0:2], -1., 1.) - #### PID target torques #################################### - target_torques = - np.multiply(self.P_COEFF_TOR, rot_e) \ - + np.multiply(self.D_COEFF_TOR, rpy_rates_e) \ - + np.multiply(self.I_COEFF_TOR, self.integral_rpy_e) - target_torques = np.clip(target_torques, -3200, 3200) - # pwm = thrust + np.dot(self.MIXER_MATRIX, target_torques) - # pwm = np.clip(pwm, self.MIN_PWM, self.MAX_PWM) - # return self.PWM2RPM_SCALE * pwm + self.PWM2RPM_CONST - return thrust + self.pwm2thrust(np.dot(self.MIXER_MATRIX, target_torques)) - + """DSL's CF2.x PID attitude control. + + Parameters + ---------- + thrust : ndarray + (4,1)-shaped array of target thrust (Newton) along the drone z-axis. + cur_quat : ndarray + (4,1)-shaped array of floats containing the current orientation as a quaternion. + target_euler : ndarray + (3,1)-shaped array of floats containing the computed target Euler angles. + target_rpy_rates : ndarray + (3,1)-shaped array of floats containing the desired roll, pitch, and yaw rates. + + Returns + ------- + ndarray + (4,1)-shaped array of integers containing the RPMs to apply to each of the 4 motors. 
+ + """ + cur_rotation = np.array(p.getMatrixFromQuaternion(cur_quat)).reshape(3, 3) + cur_rpy = np.array(p.getEulerFromQuaternion(cur_quat)) + target_quat = (Rotation.from_euler('XYZ', target_euler, degrees=False)).as_quat() + w, x, y, z = target_quat + target_rotation = (Rotation.from_quat([w, x, y, z])).as_matrix() + rot_matrix_e = np.dot((target_rotation.transpose()), cur_rotation) - np.dot(cur_rotation.transpose(), target_rotation) + rot_e = np.array([rot_matrix_e[2, 1], rot_matrix_e[0, 2], rot_matrix_e[1, 0]]) + rpy_rates_e = target_rpy_rates - (cur_rpy - self.last_rpy) / self.control_timestep + self.last_rpy = cur_rpy + self.integral_rpy_e = self.integral_rpy_e - rot_e * self.control_timestep + self.integral_rpy_e = np.clip(self.integral_rpy_e, -1500., 1500.) + self.integral_rpy_e[0:2] = np.clip(self.integral_rpy_e[0:2], -1., 1.) + #### PID target torques #################################### + target_torques = - np.multiply(self.P_COEFF_TOR, rot_e) \ + + np.multiply(self.D_COEFF_TOR, rpy_rates_e) \ + + np.multiply(self.I_COEFF_TOR, self.integral_rpy_e) + target_torques = np.clip(target_torques, -3200, 3200) + # pwm = thrust + np.dot(self.MIXER_MATRIX, target_torques) + # pwm = np.clip(pwm, self.MIN_PWM, self.MAX_PWM) + # return self.PWM2RPM_SCALE * pwm + self.PWM2RPM_CONST + return thrust + self.pwm2thrust(np.dot(self.MIXER_MATRIX, target_torques)) + def pwm2thrust(self, pwm): """Convert pwm to thrust using a quadratic function.""" @@ -179,7 +179,7 @@ def pwm2thrust(self, pwm): # solve quadratic equation using abc formula thrust = (-self.b_coeff + np.sqrt(self.b_coeff**2 - 4 * self.a_coeff * (self.c_coeff - pwm_scaled))) / (2 * self.a_coeff) return thrust - + def thrust2pwm(self, thrust): """Convert thrust to pwm using a quadratic function.""" @@ -188,5 +188,3 @@ def thrust2pwm(self, thrust): pwm = np.minimum(pwm, 1.0) thrust_pwm = pwm * self.MAX_PWM return thrust_pwm - - diff --git a/safe_control_gym/experiments/base_experiment.py b/safe_control_gym/experiments/base_experiment.py index 5a6df5ed9..ece40bcdd 100644 --- a/safe_control_gym/experiments/base_experiment.py +++ b/safe_control_gym/experiments/base_experiment.py @@ -135,7 +135,7 @@ def _execute_evaluations(self, n_episodes=None, n_steps=None, done_on_max_steps= self.env.save_data() obs, info = self._evaluation_reset(ctrl_data=ctrl_data, sf_data=sf_data) break - # elif + # elif # otherwise, keep stepping elif n_steps is not None: @@ -477,7 +477,7 @@ def get_episode_returns(self, exponentiate=False): episode_rewards (list): The total reward of each episode. ''' return self.get_episode_data('reward', postprocess_func=sum, exponentiate=exponentiate) - + def get_episode_exponentiated_returns(self): '''Total reward/return of episodes. 
diff --git a/safe_control_gym/lyapunov/lyapunov.py b/safe_control_gym/lyapunov/lyapunov.py deleted file mode 100644 index 59dab9b24..000000000 --- a/safe_control_gym/lyapunov/lyapunov.py +++ /dev/null @@ -1,1132 +0,0 @@ - -from collections.abc import Sequence -import itertools - -import numpy as np -import torch - -myDevice = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") - -# Add the configuration settings -class Configuration(object): - """Configuration class.""" - - def __init__(self): - """Initialization.""" - super(Configuration, self).__init__() - - # Dtype for computations - self.dtype = torch.float32 - ####################################################################### - # Batch size for stability verification - # TODO: change this back to 10000 in the future (by Mingxuan) - self.gp_batch_size = 100 # originally 10000 - ####################################################################### - - @property - def np_dtype(self): - """Return the numpy dtype.""" - return np.float32 - - def __repr__(self): - """Print the parameters.""" - params = ['Configuration parameters:', ''] - for param, value in self.__dict__.items(): - params.append('{}: {}'.format(param, value.__repr__())) - - return '\n'.join(params) - -config = Configuration() -del Configuration -_EPS = np.finfo(config.np_dtype).eps - -class DimensionError(Exception): - pass - -class GridWorld(object): - """Base class for function approximators on a regular grid. - - Parameters - ---------- - limits: 2d array-like - A list of limits. For example, [(x_min, x_max), (y_min, y_max)] - num_points: 1d array-like - The number of points with which to grid each dimension. - - NOTE: in original Lyapunov NN, the grid is defined in a normalized - fashion (i.e. [-1, 1] for each dimension) - """ - - def __init__(self, limits, num_points): - """Initialization, see `GridWorld`.""" - super(GridWorld, self).__init__() - - self.limits = np.atleast_2d(limits).astype(config.np_dtype) - num_points = np.broadcast_to(num_points, len(self.limits)) - self.num_points = num_points.astype(np.int16, copy=False) - self.state_dim = len(self.limits) - # print('self.state_dim: ', self.state_dim) - - if np.any(self.num_points < 2): - raise DimensionError('There must be at least 2 points in each ' - 'dimension.') - - # Compute offset and unit hyperrectangle - self.offset = self.limits[:, 0] - self.unit_maxes = ((self.limits[:, 1] - self.offset) - / (self.num_points - 1)).astype(config.np_dtype) - self.offset_limits = np.stack((np.zeros_like(self.limits[:, 0]), - self.limits[:, 1] - self.offset), - axis=1) - - # Statistics about the grid - self.discrete_points = [np.linspace(low, up, n, dtype=config.np_dtype) - for (low, up), n in zip(self.limits, - self.num_points)] - - self.nrectangles = np.prod(self.num_points - 1) - self.nindex = np.prod(self.num_points) - - self.ndim = len(self.limits) - self._all_points = None - - @property - def all_points(self): - """Return all the discrete points of the discretization. - - Returns - ------- - points : ndarray - An array with all the discrete points with size - (self.nindex, self.ndim). 
- - """ - if self._all_points is None: - # my own implementation - mesh = np.stack(np.meshgrid(*self.discrete_points),-1).reshape(-1,self.state_dim) - self._all_points = mesh.astype(config.np_dtype) - # if self.all_points.shape[1] == 2: - # swap the first two columns - # self._all_points[:,[0,1]] = self._all_points[:,[1,0]] - - # original implementation - # mesh = np.meshgrid(*self.discrete_points, indexing='ij') - # points = np.column_stack(col.ravel() for col in mesh) - # each row of the mesh is a point in the stat space - # self._all_points = points.astype(config.np_dtype) - - return self._all_points - - def __len__(self): - """Return the number of points in the discretization.""" - return self.nindex - - def sample_continuous(self, num_samples): - """Sample uniformly at random from the continuous domain. - - Parameters - ---------- - num_samples : int - - Returns - ------- - points : ndarray - Random points on the continuous rectangle. - - """ - limits = self.limits - rand = np.random.uniform(0, 1, size=(num_samples, self.ndim)) - return rand * np.diff(limits, axis=1).T + self.offset - - def sample_discrete(self, num_samples, replace=False): - """Sample uniformly at random from the discrete domain. - - Parameters - ---------- - num_samples : int - replace : bool, optional - Whether to sample with replacement. - - Returns - ------- - points : ndarray - Random points on the continuous rectangle. - - """ - idx = np.random.choice(self.nindex, size=num_samples, replace=replace) - return self.index_to_state(idx) - - def _check_dimensions(self, states): - """Raise an error if the states have the wrong dimension. - - Parameters - ---------- - states : ndarray - - """ - if not states.shape[1] == self.ndim: - raise DimensionError('the input argument has the wrong ' - 'dimensions.') - - def _center_states(self, states, clip=True): - """Center the states to the interval [0, x]. - - Parameters - ---------- - states : np.array - clip : bool, optinal - If False the data is not clipped to lie within the limits. - - Returns - ------- - offset_states : ndarray - - """ - states = np.atleast_2d(states).astype(config.np_dtype) - states = states - self.offset[None, :] - if clip: - np.clip(states, - self.offset_limits[:, 0] + 2 * _EPS, - self.offset_limits[:, 1] - 2 * _EPS, - out=states) - return states - - def index_to_state(self, indices): - """Convert indices to physical states. - - Parameters - ---------- - indices : ndarray (int) - The indices of points on the discretization. - - Returns - ------- - states : ndarray - The states with physical units that correspond to the indices. - - """ - indices = np.atleast_1d(indices) - ijk_index = np.vstack(np.unravel_index(indices, self.num_points)).T - ijk_index = ijk_index.astype(config.np_dtype) - return ijk_index * self.unit_maxes + self.offset - - def state_to_index(self, states): - """Convert physical states to indices. - - Parameters - ---------- - states: ndarray - Physical states on the discretization. - - Returns - ------- - indices: ndarray (int) - The indices that correspond to the physical states. - - """ - states = np.atleast_2d(states) - self._check_dimensions(states) - states = np.clip(states, self.limits[:, 0], self.limits[:, 1]) - states = (states - self.offset) * (1. / self.unit_maxes) - ijk_index = np.rint(states).astype(np.int32) - return np.ravel_multi_index(ijk_index.T, self.num_points) - - def state_to_rectangle(self, states): - """Convert physical states to its closest rectangle index. 
- - Parameters - ---------- - states : ndarray - Physical states on the discretization. - - Returns - ------- - rectangles : ndarray (int) - The indices that correspond to rectangles of the physical states. - - """ - ind = [] - for i, (discrete, num_points) in enumerate(zip(self.discrete_points, - self.num_points)): - idx = np.digitize(states[:, i], discrete) - idx -= 1 - np.clip(idx, 0, num_points - 2, out=idx) - - ind.append(idx) - return np.ravel_multi_index(ind, self.num_points - 1) - - def rectangle_to_state(self, rectangles): - """ - Convert rectangle indices to the states of the bottem-left corners. - - Parameters - ---------- - rectangles : ndarray (int) - The indices of the rectangles - - Returns - ------- - states : ndarray - The states that correspond to the bottom-left corners of the - corresponding rectangles. - - """ - rectangles = np.atleast_1d(rectangles) - ijk_index = np.vstack(np.unravel_index(rectangles, - self.num_points - 1)) - ijk_index = ijk_index.astype(config.np_dtype) - return (ijk_index.T * self.unit_maxes) + self.offset - - def rectangle_corner_index(self, rectangles): - """Return the index of the bottom-left corner of the rectangle. - - Parameters - ---------- - rectangles: ndarray - The indices of the rectangles. - - Returns - ------- - corners : ndarray (int) - The indices of the bottom-left corners of the rectangles. - - """ - ijk_index = np.vstack(np.unravel_index(rectangles, - self.num_points - 1)) - return np.ravel_multi_index(np.atleast_2d(ijk_index), - self.num_points) - -class QuadraticFunction(object): - """A quadratic function. - - values(x) = x.T P x - - Parameters - ---------- - matrix : np.array - 2d cost matrix for lyapunov function. - - """ - def __init__(self, matrix): - """Initialization, see `QuadraticLyapunovFunction`.""" - super(QuadraticFunction, self).__init__() - - self.matrix = np.atleast_2d(matrix).astype(config.np_dtype) - # print('self.matrix\n',self.matrix) - self.ndim = self.matrix.shape[0] - # with tf.variable_scope(self.scope_name): - # self.matrix = tf.Variable(self.matrix) - - def __call__(self, *args, **kwargs): - """Evaluate the function using the template to ensure variable sharing. - - Parameters - ---------- - args : list - The input arguments to the function. - kwargs : dict, optional - The keyword arguments to the function. - - Returns - ------- - outputs : list - The output arguments of the function as given by evaluate. 
- - """ - - outputs = self.forward(*args, **kwargs) - return outputs - - def forward(self, points): - """Like evaluate, but returns a tensor instead.""" - if isinstance(points, np.ndarray): - points = torch.from_numpy(points).float() - # linear_form = tf.matmul(points, self.matrix) - # print('points\n', points) - # print('points shape\n', points.shape) - # print('points type\n', type(points)) - # convert points to np array - if isinstance(points, torch.Tensor): - # if the tensor is on GPU, convert it to CPU first - if points.is_cuda: - points = points.cpu() - points = points.detach().numpy() - points = np.reshape(points, [-1]) - # print('points\n', points) - # reshape points to 1d array - - linear_form = points @ self.matrix - quadratic = linear_form @ points.T - # return tf.reduce_sum(quadratic, axis=1, keepdims=True) - # print('quadratic\n',quadratic) - return torch.tensor(quadratic) - - def gradient(self, points): - """Return the gradient of the function.""" - if isinstance(points, np.ndarray): - points = torch.from_numpy(points).float() - # return tf.matmul(points, self.matrix + self.matrix.T) - return torch.matmul(torch.tensor(points, dtype=config.dtype), \ - torch.tensor(self.matrix + self.matrix.T, dtype=config.dtype)) - -class LyapunovNN(torch.nn.Module): - # def __init__(self, dim_input, layer_dims, activations): - def __init__(self, input_dim, layer_dims, activations, eps=1e-6, device='cpu'): - super(LyapunovNN, self).__init__() - # network layers - self.input_dim = input_dim - self.num_layers = len(layer_dims) - self.activations = activations - self.eps = eps - self.layers = torch.nn.ModuleList() - self.kernel = [] - self.device = device - - if layer_dims[0] < input_dim: - raise ValueError('The first layer dimension must be at \ - least the input dimension!') - - if np.all(np.diff(layer_dims) >= 0): - self.output_dims = layer_dims - else: - raise ValueError('Each layer must maintain or increase \ - the dimension of its input!') - - self.hidden_dims = np.zeros(self.num_layers, dtype=int) - for i in range(self.num_layers): - if i == 0: - layer_input_dim = self.input_dim - else: - layer_input_dim = self.output_dims[i - 1] - self.hidden_dims[i] = np.ceil((layer_input_dim + 1) / 2).astype(int) - - # # build the nn structure - # self.linear1 = torch.nn.Linear(2, 2, bias=False) - # self.linear2 = torch.nn.Linear(2, 62, bias=False) - # self.linear3 = torch.nn.Linear(64, 33, bias=False) - # self.linear4 = torch.nn.Linear(64, 33, bias=False) - # W1 = self.linear1.weight - # W2 = self.linear2.weight - # # print('W1.shape\n', W1.shape) - # # print('W2.shape\n', W2.shape) - # inter_kernel = torch.matmul(W1.T, W1) + self.eps * torch.eye(W1.shape[1]) - # self.kernel_1 = torch.cat((inter_kernel, W2), dim=0) - # W3 = self.linear3.weight - # self.kernel_2 = torch.matmul(W3.T, W3) + self.eps * torch.eye(W3.shape[1]) - # W4 = self.linear4.weight - # self.kernel_3 = torch.matmul(W4.T, W4) + self.eps * torch.eye(W4.shape[1]) - - # def forward(self, x): - # if isinstance(x, np.ndarray): - # x = torch.from_numpy(x).float() - # x = self.activations[0](torch.matmul(self.kernel_1, x)) - # x = self.activations[1](torch.matmul(self.kernel_2, x)) - # x = self.activations[2](torch.matmul(self.kernel_3, x)) - # x = torch.sum(torch.square(x)) - # return x - - # def update_kernel(self): - # # update the kernel - # W1 = self.linear1.weight - # W2 = self.linear2.weight - # inter_kernel = torch.matmul(W1.T, W1) + self.eps * torch.eye(W1.shape[1]) - # self.kernel_1 = torch.cat((inter_kernel, W2), dim=0) - # W3 = 
self.linear3.weight - # self.kernel_2 = torch.matmul(W3.T, W3) + self.eps * torch.eye(W3.shape[1]) - # W4 = self.linear4.weight - # self.kernel_3 = torch.matmul(W4.T, W4) + self.eps * torch.eye(W4.shape[1]) - - - # build the nn structure - for i in range(self.num_layers): - if i == 0: - layer_input_dim = self.input_dim - else: - layer_input_dim = self.output_dims[i - 1] - self.layers.append(\ - torch.nn.Linear(layer_input_dim, self.hidden_dims[i], bias=False)) - # W = self.layers[-1].weight - # weight = W.clone() - # weight = W - # kernel = torch.matmul(weight.T, weight) + self.eps * torch.eye(W.shape[1]) - # kernel = torch.matmul(W.T, W) + self.eps * torch.eye(W.shape[1]) - dim_diff = self.output_dims[i] - layer_input_dim - if dim_diff > 0: - self.layers.append(torch.nn.Linear(layer_input_dim, dim_diff, bias=False)) - # print(kernel.shape, self.layers[-1].weight.shape) - # kernel = torch.cat((kernel, self.layers[-1].weight), dim=0) - # self.kernel.append(kernel) - self.update_kernel() - - def forward(self, x): - if isinstance(x, np.ndarray): - x = torch.from_numpy(x).float() - # put the input to the device - x = x.to(self.device) - - for i in range(self.num_layers): - # print('self.kernel[i].is_cuda\n', self.kernel[i].is_cuda) - # print('x.is_cuda\n', x.is_cuda) - layer_output = torch.matmul(self.kernel[i], x) - x = self.activations[i](layer_output) - values = torch.sum(torch.square(x), dim=-1) - return values - - def update_kernel(self): - self.kernel = [] # clear the kernel - param_idx = 0 # for skipping the extra layer parameters - for i in range(self.num_layers): - if i == 0: - layer_input_dim = self.input_dim - else: - layer_input_dim = self.output_dims[i - 1] - # build the positive definite part of the kernel - W = self.layers[i + param_idx].weight - weight = W.clone() - kernel = torch.matmul(weight.T, weight) + self.eps * torch.eye(W.shape[1]) - # if the kernel need extra part, append the parameters of the next layer - dim_diff = self.output_dims[i] - layer_input_dim - if dim_diff > 0: - kernel = torch.cat((kernel, self.layers[i+1].weight), dim=0) - param_idx += 1 - # print('i: ', i) - self.kernel.append(kernel) - - # def print_manual_kernel(self): - # print('Kernel 1:\n', self.kernel_1) - # print('Kernel 2:\n', self.kernel_2) - # print('Kernel 3:\n', self.kernel_3) - - # # print kernel eigenvalues - # eigvals, _ = np.linalg.eig(self.kernel_1[0:2, :].detach().numpy()) - # print('Eigenvalues of (W0.T*W0 + eps*I):', eigvals, '\n') - # eigvals, _ = np.linalg.eig(self.kernel_2.detach().numpy()) - # print('Eigenvalues of (W0.T*W0 + eps*I):', eigvals, '\n') - # eigvals, _ = np.linalg.eig(self.kernel_3.detach().numpy()) - # print('Eigenvalues of (W0.T*W0 + eps*I):', eigvals, '\n') - - def print_params(self): - offset = 0 - # get nn parameters - params = [] - for _, param in self.named_parameters(): - params.append(param.data) - for i, dim_diff in enumerate(np.diff(np.concatenate([[self.input_dim], self.output_dims]))): - print('Layer weights {}:'.format(i)) - W0 = params[offset + i] - print('W0:\n{}'.format(W0)) - if dim_diff > 0: - W1 = params[offset + 1 + i] - print('W1:\n{}'.format(W1)) - else: - offset += 1 - kernel = W0.T.dot(W0) + self.eps * np.eye(W0.shape[1]) - eigvals, _ = np.linalg.eig(kernel) - print('Eigenvalues of (W0.T*W0 + eps*I):', eigvals, '\n') - -class Lyapunov(object): - """A class for general Lyapunov functions. - - Parameters - ---------- - discretization : ndarray - A discrete grid on which to evaluate the Lyapunov function. 
- lyapunov_function : callable or instance of `DeterministicFunction` - The lyapunov function. Can be called with states and returns the - corresponding values of the Lyapunov function. - dynamics : a callable or an instance of `Function` - The dynamics model. Can be either a deterministic function or something - uncertain that includes error bounds. - lipschitz_dynamics : ndarray or float - The Lipschitz constant of the dynamics. Either globally, or locally - for each point in the discretization (within a radius given by the - discretization constant. This is the closed-loop Lipschitz constant - including the policy! - lipschitz_lyapunov : ndarray or float - The Lipschitz constant of the lyapunov function. Either globally, or - locally for each point in the discretization (within a radius given by - the discretization constant. - tau : float - The discretization constant. - policy : ndarray, optional - The control policy used at each state (Same number of rows as the - discretization). - initial_set : ndarray, optional - A boolean array of states that are known to be safe a priori. - adaptive : bool, optional - A boolean determining whether an adaptive discretization is used for - stability verification. - - """ - - def __init__(self, discretization, lyapunov_function, dynamics, - lipschitz_dynamics, lipschitz_lyapunov, - tau, policy, initial_set=None, adaptive=False): - """Initialization, see `Lyapunov` for details.""" - super(Lyapunov, self).__init__() - - self.discretization = discretization - self.policy = policy - - # Keep track of the safe sets - self.safe_set = np.zeros(np.prod(discretization.num_points), - dtype=bool) - - self.initial_safe_set = initial_set - if initial_set is not None: - # print('initial safe set\n', initial_set) - # print('initial safe set shape\n', initial_set.shape) - # print('initial safe set type\n', type(initial_set)) - # print('self.safe_set\n', self.safe_set) - # print('self.safe_set shape\n', self.safe_set.shape) - # print('self.safe_set type\n', type(self.safe_set)) - self.safe_set[initial_set] = True - - # Discretization constant - self.tau = tau - - # Make sure dynamics are of standard framework - self.dynamics = dynamics - - # Make sure Lyapunov fits into standard framework - self.lyapunov_function = lyapunov_function - - # Storage for graph - self._storage = dict() - # self.feed_dict = get_feed_dict(tf.get_default_graph()) - - # Lyapunov values - self.values = None - - # self.c_max = tf.placeholder(config.dtype, shape=()) - self.c_max = None - # self.feed_dict[self.c_max] = 0. 
- - self._lipschitz_dynamics = lipschitz_dynamics - self._lipschitz_lyapunov = lipschitz_lyapunov - - self.update_values() - - self.adaptive = adaptive - - # Keep track of the refinement `N(x)` used around each state `x` in - # the adaptive discretization; `N(x) = 0` by convention if `x` is - # unsafe - self._refinement = np.zeros(discretization.nindex, dtype=int) - if initial_set is not None: - self._refinement[initial_set] = 1 - - def update_values(self): - """Update the discretized values when the Lyapunov function changes.""" - values = np.zeros(self.discretization.nindex) - for i in range(self.discretization.nindex): - # print('self.discretization.all_points[i]\n', self.discretization.all_points[i]) - # print('self.lyapunov_function(self.discretization.all_points[i]).squeeze(), \n', \ - # self.lyapunov_function(\ - # self.discretization.all_points[i]).squeeze()) - values[i] = self.lyapunov_function(\ - self.discretization.all_points[i]).squeeze() - self.values = values - - def update_safe_set(self, can_shrink=True, max_refinement=1, - safety_factor=1., parallel_iterations=1): - """Compute and update the safe set. - - Parameters - ---------- - can_shrink : bool, optional - A boolean determining whether previously safe states other than the - initial safe set must be verified again (i.e., can the safe set - shrink in volume?) - max_refinement : int, optional - The maximum integer divisor used for adaptive discretization. - safety_factor : float, optional - A multiplicative factor greater than 1 used to conservatively - estimate the required adaptive discretization. - parallel_iterations : int, optional - The number of parallel iterations to use for safety verification in - the adaptive case. Passed to `tf.map_fn`. - - """ - safety_factor = np.maximum(safety_factor, 1.) 
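The update_safe_set loop above, together with the threshold and v_decrease_bound helpers that follow, marks a grid point safe when the (error-bounded) decrease of V stays below -L_v * (1 + L_f) * tau. A condensed numeric sketch with illustrative constants:

L_v, L_f, tau = 2.0, 1.5, 0.01             # illustrative Lipschitz constants and grid spacing
v_now, v_next, error_bound = 1.00, 0.90, 0.02

threshold = -L_v * (1.0 + L_f) * tau       # -0.05
decrease = (v_next - v_now) + error_bound  # -0.08
is_safe = decrease < threshold             # True for this example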
- - np_states = lambda x: np.array(x, dtype=config.dtype) - # decrease = lambda x: self.v_decrease_bound(x, self.dynamics(x, self.policy(x))) - decrease = lambda x: self.v_decrease_bound(x, self.dynamics(x)) - threshold = lambda x: self.threshold(x, self.tau) - np_negative = lambda x: np.squeeze(decrease(x) < threshold(x), axis=0) - - if can_shrink: - # Reset the safe set and adaptive discretization - safe_set = np.zeros_like(self.safe_set, dtype=bool) - refinement = np.zeros_like(self._refinement, dtype=int) - if self.initial_safe_set is not None: - safe_set[self.initial_safe_set] = True - refinement[self.initial_safe_set] = 1 - else: - # Assume safe set cannot shrink - safe_set = self.safe_set - refinement = self._refinement - - value_order = np.argsort(self.values) - safe_set = safe_set[value_order] - refinement = refinement[value_order] - - # Verify safety in batches - batch_size = config.gp_batch_size - batch_generator = batchify((value_order, safe_set, refinement), - batch_size) - # print('batch_generator\n', batch_generator.__dir__()) - # exit() - index_to_state = self.discretization.index_to_state - - ####################################################################### - - for i, (indices, safe_batch, refine_batch) in batch_generator: - # print('indices\n', indices) - # print('safe_batch\n', safe_batch) - # print('refine_batch\n', refine_batch) - # exit() - - states = index_to_state(indices) - np_state = np.squeeze(states) - # print('np_states in update safe set\n', np_state) - # print('np_states shape\n', np_state.shape) - # print('np_states type\n', type(np_state)) - - # Update the safety with the safe_batch result - # negative = tf_negative.eval(feed_dict) - # negative = np_negative(np_state) - negative = np.zeros_like(safe_batch, dtype=bool) - for state_index in range(len(np_state)): - negative[state_index] = np_negative(np_state[state_index]) - # convert negative to np array - negative = np.array(negative, dtype=bool) - # check data type - # print('negative\n', negative) - # print('negative shape\n', negative.shape) - # print('negative type\n', type(negative)) - # print('safe_batch\n', safe_batch) - # print('safe_batch shape\n', safe_batch.shape) - # print('safe_batch type\n', type(safe_batch)) - safe_batch |= negative - # exit() - refine_batch[negative] = 1 - - # Boolean array: argmin returns first element that is False - # If all are safe then it returns 0 - bound = np.argmin(safe_batch) - refine_bound = 0 - - # Check if there are unsafe elements in the batch - if bound > 0 or not safe_batch[0]: - safe_batch[bound:] = False - refine_batch[bound:] = 0 - break - - # The largest index of a safe value - max_index = i + bound + refine_bound - 1 - - ####################################################################### - - # Set placeholder for c_max to the corresponding value - self.c_max = self.values[value_order[max_index]] - - # Restore the order of the safe set and adaptive refinement - safe_nodes = value_order[safe_set] - self.safe_set[:] = False - self.safe_set[safe_nodes] = True - self._refinement[value_order] = refinement - - # Ensure the initial safe set is kept - if self.initial_safe_set is not None: - self.safe_set[self.initial_safe_set] = True - self._refinement[self.initial_safe_set] = 1 - - def threshold(self, states, tau=None): - """Return the safety threshold for the Lyapunov condition. - - Parameters - ---------- - states : ndarray or Tensor - - tau : float or Tensor, optional - Discretization constant to consider. 
- - Returns - ------- - lipschitz : float, ndarray or Tensor - Either the scalar threshold or local thresholds, depending on - whether lipschitz_lyapunov and lipschitz_dynamics are local or not. - - """ - if tau is None: - tau = self.tau - # if state is not a tensor, convert it to a tensor - if not isinstance(states, torch.Tensor): - states = torch.tensor(states, dtype=config.dtype, requires_grad=True) - states = states.float() - # print('states\n', states) - lv = self._lipschitz_lyapunov(states) - # print('lv\n', lv) - # print('lv shape\n', lv.shape) - # print('hasattr(self._lipschitz_lyapunov, __call__)\n', hasattr(self._lipschitz_lyapunov, '__call__')) - ## TODO: check this part (by Mingxuan) - # if hasattr(self._lipschitz_lyapunov, '__call__') and lv.shape[1] > 1: - # # lv = tf.norm(lv, ord=1, axis=1, keepdims=True) - # lv = torch.norm(lv, p=1, dim=1, keepdim=True) - # convert states to np array - if states.is_cuda: - states = states.cpu() - states = states.detach().numpy() - lf = self._lipschitz_dynamics(states) - return - lv * (1. + lf) * tau - - def v_decrease_bound(self, states, next_states): - """Compute confidence intervals for the decrease along Lyapunov function. - - Parameters - ---------- - states : np.array - The states at which to start (could be equal to discretization). - next_states : np.array or tuple - The dynamics evaluated at each point on the discretization. If - the dynamics are uncertain then next_states is a tuple with mean - and error bounds. - - Returns - ------- - upper_bound : np.array - The upper bound on the change in values at each grid point. - - """ - v_dot, v_dot_error = self.v_decrease_confidence(states, next_states) - - return v_dot + v_dot_error - - def v_decrease_confidence(self, states, next_states): - """Compute confidence intervals for the decrease along Lyapunov function. - - Parameters - ---------- - states : np.array - The states at which to start (could be equal to discretization). - next_states : np.array - The dynamics evaluated at each point on the discretization. If - the dynamics are uncertain then next_states is a tuple with mean - and error bounds. - - Returns - ------- - mean : np.array - The expected decrease in values at each grid point. 
- error_bounds : np.array - The error bounds for the decrease at each grid point - - """ - if isinstance(next_states, Sequence): - next_states, error_bounds = next_states - lv = self._lipschitz_lyapunov(next_states) - # bound = tf.reduce_sum(lv * error_bounds, axis=1, keepdims=True) - # bound = torch.sum(lv * error_bounds, dim=1, keepdim=True) - bound = np.sum(lv * error_bounds, axis=1, keepdims=True) - else: - # bound = tf.constant(0., dtype=config.dtype) - bound = torch.tensor(0., dtype=config.dtype) - if not isinstance(states, torch.Tensor): - states = torch.tensor(states, dtype=torch.float64) - states = states.float() # avoid feedforward data type error - # next_states is of type casadi.DM - # convert the next_states first to numpy array, then to torch tensor - if not isinstance(next_states, torch.Tensor): - next_states = torch.tensor(np.array(next_states), dtype=torch.float64) - next_states = next_states.float() # avoid feedforward data type error - # print('next_states\n', next_states) - # print('next_states shape\n', next_states.shape) - # print('next_states type\n', type(next_states)) - # print('next_states data type\n', next_states.dtype) - v_decrease = (self.lyapunov_function(next_states) - - self.lyapunov_function(states)) - - return v_decrease, bound - -# TODO: put this in a separate file (by Mingxuan) -def batchify(arrays, batch_size): - """Yield the arrays in batches and in order. - - The last batch might be smaller than batch_size. - - Parameters - ---------- - arrays : list of ndarray - The arrays that we want to convert to batches. - batch_size : int - The size of each individual batch. - """ - if not isinstance(arrays, (list, tuple)): - arrays = (arrays,) - - # Iterate over array in batches - for i, i_next in zip(itertools.count(start=0, step=batch_size), - itertools.count(start=batch_size, step=batch_size)): - - batches = [array[i:i_next] for array in arrays] - - # Break if there are no points left - if batches[0].size: - yield i, batches - else: - break - -class GridWorld_pendulum(object): - """Base class for function approximators on a regular grid. - - Parameters - ---------- - limits: 2d array-like - A list of limits. For example, [(x_min, x_max), (y_min, y_max)] - num_points: 1d array-like - The number of points with which to grid each dimension. - - NOTE: in original Lyapunov NN, the grid is defined in a normalized - fashion (i.e. 
[-1, 1] for each dimension) - """ - - def __init__(self, limits, num_points): - """Initialization, see `GridWorld`.""" - super(GridWorld_pendulum, self).__init__() - - self.limits = np.atleast_2d(limits).astype(config.np_dtype) - num_points = np.broadcast_to(num_points, len(self.limits)) - self.num_points = num_points.astype(np.int16, copy=False) - self.state_dim = len(self.limits) - # print('self.state_dim: ', self.state_dim) - - if np.any(self.num_points < 2): - raise DimensionError('There must be at least 2 points in each ' - 'dimension.') - - # Compute offset and unit hyperrectangle - self.offset = self.limits[:, 0] - self.unit_maxes = ((self.limits[:, 1] - self.offset) - / (self.num_points - 1)).astype(config.np_dtype) - self.offset_limits = np.stack((np.zeros_like(self.limits[:, 0]), - self.limits[:, 1] - self.offset), - axis=1) - - # Statistics about the grid - self.discrete_points = [np.linspace(low, up, n, dtype=config.np_dtype) - for (low, up), n in zip(self.limits, - self.num_points)] - - self.nrectangles = np.prod(self.num_points - 1) - self.nindex = np.prod(self.num_points) - - self.ndim = len(self.limits) - self._all_points = None - - @property - def all_points(self): - """Return all the discrete points of the discretization. - - Returns - ------- - points : ndarray - An array with all the discrete points with size - (self.nindex, self.ndim). - - """ - if self._all_points is None: - # my own implementation - mesh = np.stack(np.meshgrid(*self.discrete_points),-1).reshape(-1,self.state_dim) - self._all_points = mesh.astype(config.np_dtype) - if self.all_points.shape[1] == 2: - # swap the first two columns - self._all_points[:,[0,1]] = self._all_points[:,[1,0]] - - # original implementation - # mesh = np.meshgrid(*self.discrete_points, indexing='ij') - # points = np.column_stack(col.ravel() for col in mesh) - # each row of the mesh is a point in the stat space - # self._all_points = points.astype(config.np_dtype) - - return self._all_points - - def __len__(self): - """Return the number of points in the discretization.""" - return self.nindex - - def sample_continuous(self, num_samples): - """Sample uniformly at random from the continuous domain. - - Parameters - ---------- - num_samples : int - - Returns - ------- - points : ndarray - Random points on the continuous rectangle. - - """ - limits = self.limits - rand = np.random.uniform(0, 1, size=(num_samples, self.ndim)) - return rand * np.diff(limits, axis=1).T + self.offset - - def sample_discrete(self, num_samples, replace=False): - """Sample uniformly at random from the discrete domain. - - Parameters - ---------- - num_samples : int - replace : bool, optional - Whether to sample with replacement. - - Returns - ------- - points : ndarray - Random points on the continuous rectangle. - - """ - idx = np.random.choice(self.nindex, size=num_samples, replace=replace) - return self.index_to_state(idx) - - def _check_dimensions(self, states): - """Raise an error if the states have the wrong dimension. - - Parameters - ---------- - states : ndarray - - """ - if not states.shape[1] == self.ndim: - raise DimensionError('the input argument has the wrong ' - 'dimensions.') - - def _center_states(self, states, clip=True): - """Center the states to the interval [0, x]. - - Parameters - ---------- - states : np.array - clip : bool, optinal - If False the data is not clipped to lie within the limits. 
- - Returns - ------- - offset_states : ndarray - - """ - states = np.atleast_2d(states).astype(config.np_dtype) - states = states - self.offset[None, :] - if clip: - np.clip(states, - self.offset_limits[:, 0] + 2 * _EPS, - self.offset_limits[:, 1] - 2 * _EPS, - out=states) - return states - - def index_to_state(self, indices): - """Convert indices to physical states. - - Parameters - ---------- - indices : ndarray (int) - The indices of points on the discretization. - - Returns - ------- - states : ndarray - The states with physical units that correspond to the indices. - - """ - indices = np.atleast_1d(indices) - ijk_index = np.vstack(np.unravel_index(indices, self.num_points)).T - ijk_index = ijk_index.astype(config.np_dtype) - return ijk_index * self.unit_maxes + self.offset - - def state_to_index(self, states): - """Convert physical states to indices. - - Parameters - ---------- - states: ndarray - Physical states on the discretization. - - Returns - ------- - indices: ndarray (int) - The indices that correspond to the physical states. - - """ - states = np.atleast_2d(states) - self._check_dimensions(states) - states = np.clip(states, self.limits[:, 0], self.limits[:, 1]) - states = (states - self.offset) * (1. / self.unit_maxes) - ijk_index = np.rint(states).astype(np.int32) - return np.ravel_multi_index(ijk_index.T, self.num_points) - - def state_to_rectangle(self, states): - """Convert physical states to its closest rectangle index. - - Parameters - ---------- - states : ndarray - Physical states on the discretization. - - Returns - ------- - rectangles : ndarray (int) - The indices that correspond to rectangles of the physical states. - - """ - ind = [] - for i, (discrete, num_points) in enumerate(zip(self.discrete_points, - self.num_points)): - idx = np.digitize(states[:, i], discrete) - idx -= 1 - np.clip(idx, 0, num_points - 2, out=idx) - - ind.append(idx) - return np.ravel_multi_index(ind, self.num_points - 1) - - def rectangle_to_state(self, rectangles): - """ - Convert rectangle indices to the states of the bottem-left corners. - - Parameters - ---------- - rectangles : ndarray (int) - The indices of the rectangles - - Returns - ------- - states : ndarray - The states that correspond to the bottom-left corners of the - corresponding rectangles. - - """ - rectangles = np.atleast_1d(rectangles) - ijk_index = np.vstack(np.unravel_index(rectangles, - self.num_points - 1)) - ijk_index = ijk_index.astype(config.np_dtype) - return (ijk_index.T * self.unit_maxes) + self.offset - - def rectangle_corner_index(self, rectangles): - """Return the index of the bottom-left corner of the rectangle. - - Parameters - ---------- - rectangles: ndarray - The indices of the rectangles. - - Returns - ------- - corners : ndarray (int) - The indices of the bottom-left corners of the rectangles. 
- - """ - ijk_index = np.vstack(np.unravel_index(rectangles, - self.num_points - 1)) - return np.ravel_multi_index(np.atleast_2d(ijk_index), - self.num_points) diff --git a/safe_control_gym/lyapunov/utilities.py b/safe_control_gym/lyapunov/utilities.py deleted file mode 100644 index 1090fc9ad..000000000 --- a/safe_control_gym/lyapunov/utilities.py +++ /dev/null @@ -1,722 +0,0 @@ - -# import itertools # for batchify (now in lyapnov.py) - -import numpy as np -from matplotlib.colors import ListedColormap -import scipy.linalg -from scipy import signal -import torch -from parfor import pmap -import multiprocessing as mp -import casadi as cs - -from safe_control_gym.lyapunov.lyapunov import GridWorld -from safe_control_gym.experiments.base_experiment import BaseExperiment -from safe_control_gym.lyapunov.lyapunov import config -from safe_control_gym.math_and_models.symbolic_systems import SymbolicModel - -NP_DTYPE = config.np_dtype -TF_DTYPE = config.dtype - -def gridding(state_dim, state_constraints, num_states = 251, use_zero_threshold = True): - ''' evenly discretize the state space - - Args: - state_dim (int): The dimension of the state space. - state_constraints (np array): The constraints of the state space. - num_state (int): The number of states along each dimension. - use_zero_threshold (bool): Whether to use zero threshold. - False: the grid is infinitesimal - ''' - - # State grid - if state_constraints is None: - state_constraints = np.array([[-1., 1.], ] * state_dim) - grid_limits = state_constraints - state_discretization = GridWorld(grid_limits, num_states) - - # Discretization constant - if use_zero_threshold: - tau = 0.0 # assume the grid is infinitesimal - else: - tau = np.sum(state_discretization.unit_maxes) / 2 - - print('Grid size: {}'.format(state_discretization.nindex)) - print('Discretization constant (tau): {}'.format(tau)) - return state_discretization - -def compute_roa(grid, env_func, ctrl ,equilibrium=None, no_traj=True): - """Compute the largest ROA as a set of states in a discretization.""" - if isinstance(grid, np.ndarray): - all_points = grid - nindex = grid.shape[0] - ndim = grid.shape[1] - else: # grid is a GridWorld instance - all_points = grid.all_points - nindex = grid.nindex # number of points in the discretization - ndim = grid.ndim # dimension of the state space - - # Forward-simulate all trajectories from initial points in the discretization - # if no_traj: - # end_states = all_points - # for t in range(1, horizon): - # end_states = closed_loop_dynamics(end_states) - # else: - # trajectories = np.empty((nindex, ndim, horizon)) - # trajectories[:, :, 0] = all_points - # for t in range(1, horizon): - # trajectories[:, :, t] = closed_loop_dynamics(trajectories[:, :, t - 1]) - # end_states = trajectories[:, :, -1] - random_env = env_func(gui=False) - - roa = np.zeros((nindex)) - trajectories = [{} for _ in range(nindex)] - - for state_index in range(nindex): - # for all initial state in the grid - # print('state_index', state_index) - init_state = grid.all_points[state_index] - init_state_dict = {'init_x': init_state[0], 'init_x_dot': init_state[1], \ - 'init_theta': init_state[2], 'init_theta_dot': init_state[3]} - init_state, _ = random_env.reset(init_state = init_state_dict) - # print('init_state', init_state) - static_env = env_func(gui=False, random_state=False, init_state=init_state) - static_train_env = env_func(gui=False, randomized_init=False, init_state=init_state) - # Create experiment, train, and run evaluation - experiment = 
BaseExperiment(env=static_env, ctrl=ctrl, train_env=static_train_env) - - try: - trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - roa[state_index] = trajs_data['info'][-1][-1]['goal_reached'] - input_traj = trajs_data['action'][0] - state_traj = trajs_data['obs'][0] - trajectories[state_index]['state_traj'] = state_traj - trajectories[state_index]['input_traj'] = input_traj - print('trajectory[state_index]', trajectories[state_index]) - - print('goal reached', trajs_data['info'][-1][-1]['goal_reached']) - # exit() - # close environments - static_env.close() - static_train_env.close() - except RuntimeError: - print('RuntimeError: possibly infeasible initial state') - roa[state_index] = False - # print(ctrl.model.__dir__()) - # print(ctrl.model.nx) - # exit() - trajectories[state_index]['state_traj'] = np.zeros((2, ctrl.model.nx)) - trajectories[state_index]['input_traj'] = np.zeros((1, ctrl.model.nu)) - # close environments - static_env.close() - static_train_env.close() - continue - # trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - # print('obs\n', trajs_data['obs']) - # print('trajs_data\n', trajs_data['info'][-1][-1]) - # print('\n') - # print('trajs_data[\'info\']\n', trajs_data['info'][-1][-1]['goal_reached']) - # input('press enter to continue') - # print('\n') - # exit() - # print('goal reached', trajs_data['info'][-1][1]['goal_reached']) - - - # if equilibrium is None: - # equilibrium = np.zeros((1, ndim)) - random_env.close() - # # Compute an approximate ROA as all states that end up "close" to 0 - # dists = np.linalg.norm(end_states - equilibrium, ord=2, axis=1, keepdims=True).ravel() - # roa = (dists <= tol) - if no_traj: - return roa - else: - return roa, trajectories - - - -def compute_roa_fix(grid, env_func, ctrl ,equilibrium=None, no_traj=True): - """Compute the largest ROA as a set of states in a discretization.""" - if isinstance(grid, np.ndarray): - all_points = grid - nindex = grid.shape[0] - ndim = grid.shape[1] - else: # grid is a GridWorld instance - all_points = grid.all_points - nindex = grid.nindex # number of points in the discretization - ndim = grid.ndim # dimension of the state space - - random_env = env_func(gui=False) - - roa = np.zeros((nindex)) - - for state_index in range(nindex): - # for all initial state in the grid - # print('state_index', state_index) - init_state = grid.all_points[state_index] - init_state_dict = {'init_x': 0.0, 'init_x_dot': init_state[0], \ - 'init_theta': init_state[1], 'init_theta_dot': init_state[2]} - init_state, _ = random_env.reset(init_state = init_state_dict) - # print('init_state', init_state) - static_env = env_func(gui=False, random_state=False, init_state=init_state) - static_train_env = env_func(gui=False, randomized_init=False, init_state=init_state) - # Create experiment, train, and run evaluation - experiment = BaseExperiment(env=static_env, ctrl=ctrl, train_env=static_train_env) - - try: - trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - roa[state_index] = trajs_data['info'][-1][-1]['goal_reached'] - # close environments - static_env.close() - static_train_env.close() - except RuntimeError: - print('RuntimeError: possibly infeasible initial state') - roa[state_index] = False - # close environments - static_env.close() - static_train_env.close() - continue - - # if equilibrium is None: - # equilibrium = np.zeros((1, ndim)) - random_env.close() - # # Compute an approximate ROA as all states that end up 
"close" to 0 - # dists = np.linalg.norm(end_states - equilibrium, ord=2, axis=1, keepdims=True).ravel() - # roa = (dists <= tol) - if no_traj: - return roa - else: - return roa, trajectories - - -# define the function to be parallelized -def simulate_at_index(state_index, grid, env_func, ctrl): - random_env = env_func(gui=False) - init_state = grid.all_points[state_index] - init_state_dict = {'init_x': init_state[0], 'init_x_dot': init_state[1], \ - 'init_theta': init_state[2], 'init_theta_dot': init_state[3]} - init_state, _ = random_env.reset(init_state = init_state_dict) - # print('init_state', init_state) - static_env = env_func(gui=False, random_state=False, init_state=init_state) - static_train_env = env_func(gui=False, randomized_init=False, init_state=init_state) - # Create experiment, train, and run evaluation - experiment = BaseExperiment(env=static_env, ctrl=ctrl, train_env=static_train_env) - - # # if infeasible initial state, return False - # try: - # trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - # static_env.close() - # static_train_env.close() - # print('goal reached', trajs_data['info'][-1][-1]['goal_reached']) - # # return result - # if trajs_data['info'][-1][-1]['goal_reached']: - # return True - # else: - # return False - # except RuntimeError: - # print('RuntimeError: possibly infeasible initial state') - # # close environments - # static_env.close() - # static_train_env.close() - # return False - # # close the env - trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - static_env.close() - static_train_env.close() - random_env.close() - - return trajs_data['info'][-1][-1]['goal_reached'] - - -def compute_roa_par(grid, env_func, ctrl, equilibrium=None, no_traj=True): - """Compute the largest ROA as a set of states in a discretization.""" - if isinstance(grid, np.ndarray): - all_points = grid - nindex = grid.shape[0] - ndim = grid.shape[1] - else: # grid is a GridWorld instance - all_points = grid.all_points - nindex = grid.nindex # number of points in the discretization - ndim = grid.ndim # dimension of the state space - - # Forward-simulate all trajectories from initial points in the discretization - # random_env = env_func(gui=False) - roa = [False] * nindex - - # # init multiprocessing pool - # pool = mp.Pool(mp.cpu_count()) - # # pool apply the 'simulate_at_index' function to all state indices - # roa = [pool.apply(simulate_at_index, \ - # args=(state_idx, grid, random_env, env_func, ctrl)) for state_idx in range(nindex)] - # # close the pool - # pool.close() - # roa = pmap(simulate_at_index, range(nindex), (grid, random_env, env_func, ctrl)) - roa = pmap(simulate_at_index, range(nindex), (grid, env_func, ctrl)) - - # convert list to np array - roa = np.array(roa) - - if no_traj: - return roa - else: - return roa, trajectories - -# define the function to be parallelized -def simulate_at_index_fix(state_index, grid, env_func, ctrl): - random_env = env_func(gui=False) - init_state = grid.all_points[state_index] - init_state_dict = {'init_x': 0.0, 'init_x_dot': init_state[0], \ - 'init_theta': init_state[1], 'init_theta_dot': init_state[2]} - init_state, _ = random_env.reset(init_state = init_state_dict) - # print('init_state', init_state) - static_env = env_func(gui=False, random_state=False, init_state=init_state) - static_train_env = env_func(gui=False, randomized_init=False, init_state=init_state) - # Create experiment, train, and run evaluation - experiment = BaseExperiment(env=static_env, 
ctrl=ctrl, train_env=static_train_env) - - trajs_data, _ = experiment.run_evaluation(training=True, n_episodes=1, verbose=False) - static_env.close() - static_train_env.close() - random_env.close() - - return trajs_data['info'][-1][-1]['goal_reached'] - - -def compute_roa_fix_par(grid, env_func, ctrl, equilibrium=None, no_traj=True): - """Compute the largest ROA as a set of states in a discretization.""" - if isinstance(grid, np.ndarray): - all_points = grid - nindex = grid.shape[0] - ndim = grid.shape[1] - else: # grid is a GridWorld instance - all_points = grid.all_points - nindex = grid.nindex # number of points in the discretization - ndim = grid.ndim # dimension of the state space - - # Forward-simulate all trajectories from initial points in the discretization - roa = [False] * nindex - roa = pmap(simulate_at_index_fix, range(nindex), (grid, env_func, ctrl)) - # convert list to np array - roa = np.array(roa) - - if no_traj: - return roa - else: - return roa, trajectories - -def binary_cmap(color='red', alpha=1.): - """Construct a binary colormap.""" - if color == 'red': - color_code = (1., 0., 0., alpha) - elif color == 'green': - color_code = (0., 1., 0., alpha) - elif color == 'blue': - color_code = (0., 0., 1., alpha) - else: - color_code = color - transparent_code = (1., 1., 1., 0.) - return ListedColormap([transparent_code, color_code]) - -def balanced_class_weights(y_true, scale_by_total=True): - """Compute class weights from class label counts.""" - y = y_true.astype(np.bool_) - nP = y.sum() - nN = y.size - y.sum() - class_counts = np.array([nN, nP]) - - weights = np.ones_like(y, dtype=float) - weights[ y] /= nP - weights[~y] /= nN - if scale_by_total: - weights *= y.size - - return weights, class_counts - -def dlqr(a, b, q, r): - """Compute the discrete-time LQR controller. - - The optimal control input is `u = -k.dot(x)`. - - Parameters - ---------- - a : np.array - b : np.array - q : np.array - r : np.array - - Returns - ------- - k : np.array - Controller matrix - p : np.array - Cost to go matrix - """ - a, b, q, r = map(np.atleast_2d, (a, b, q, r)) - p = scipy.linalg.solve_discrete_are(a, b, q, r) - - # LQR gain - # k = (b.T * p * b + r)^-1 * (b.T * p * a) - bp = b.T.dot(p) - tmp1 = bp.dot(b) - tmp1 += r - tmp2 = bp.dot(a) - k = np.linalg.solve(tmp1, tmp2) - - return k, p - -def discretize_linear_system(A, B, dt, exact=False): - '''Discretization of a linear system - - dx/dt = A x + B u - --> xd[k+1] = Ad xd[k] + Bd ud[k] where xd[k] = x(k*dt) - - Args: - A (ndarray): System transition matrix. - B (ndarray): Input matrix. - dt (scalar): Step time interval. - exact (bool): If to use exact discretization. - - Returns: - Ad (ndarray): The discrete linear state matrix A. - Bd (ndarray): The discrete linear input matrix B. - ''' - - state_dim, input_dim = A.shape[1], B.shape[1] - - if exact: - M = np.zeros((state_dim + input_dim, state_dim + input_dim)) - M[:state_dim, :state_dim] = A - M[:state_dim, state_dim:] = B - - Md = scipy.linalg.expm(M * dt) - Ad = Md[:state_dim, :state_dim] - Bd = Md[:state_dim, state_dim:] - else: - Identity = np.eye(state_dim) - Ad = Identity + A * dt - Bd = B * dt - - return Ad, Bd - -def get_discrete_linear_system_matrices(model, x_0, u_0): - '''Get discrete linear system matrices for a given model. - - Args: - model (ctrl.model) - x_0 (ndarray): The initial state. - u_0 (ndarray): The initial input. - - Returns: - A (ndarray): The discrete linear state matrix A. - B (ndarray): The discrete linear input matrix B. - ''' - - # Linearization. 
- df = model.df_func(x_0, u_0) - A, B = df[0].toarray(), df[1].toarray() - - # Discretize. - A, B = discretize_linear_system(A, B, model.dt) - - return A, B - -def onestep_dynamics(x, env_func, ctrl): - ''' one-step forward dynamics ''' - # get the format of the initial state - random_env = env_func(gui=False) - init_state_dict = {'init_x': x[0], 'init_x_dot': x[1], \ - 'init_theta': x[2], 'init_theta_dot': x[3]} - init_state, _ = random_env.reset(init_state = init_state_dict) - static_env = env_func(gui=False, random_state=False, init_state=init_state) - static_train_env = env_func(gui=False, randomized_init=False, init_state=init_state) - experiment = BaseExperiment(env=static_env, ctrl=ctrl, train_env=static_train_env) - trajs_data, _ = experiment.run_evaluation(training=False, n_steps=1, verbose=False) - x = trajs_data['obs'][0][-1] - static_env.close() - static_train_env.close() - random_env.close() - - return x - - - -class InvertedPendulum(object): - """Inverted Pendulum. - - Parameters - ---------- - mass : float - length : float - friction : float, optional - dt : float, optional - The sampling time. - normalization : tuple, optional - A tuple (Tx, Tu) of arrays used to normalize the state and actions. It - is so that diag(Tx) *x_norm = x and diag(Tu) * u_norm = u. - - """ - - def __init__(self, mass, length, friction=0, dt=1 / 80, - normalization=None): - """Initialization; see `InvertedPendulum`.""" - super(InvertedPendulum, self).__init__() - self.mass = mass - self.length = length - self.gravity = 9.81 - self.friction = friction - self.dt = dt - self.nx = 2 - self.nu = 1 - self.symbolic = None - - self.normalization = normalization - if normalization is not None: - self.normalization = [np.array(norm, dtype=config.np_dtype) - for norm in normalization] - self.inv_norm = [norm ** -1 for norm in self.normalization] - - def __call__(self, *args, **kwargs): - """Evaluate the function using the template to ensure variable sharing. - - Parameters - ---------- - args : list - The input arguments to the function. - kwargs : dict, optional - The keyword arguments to the function. - - Returns - ------- - outputs : list - The output arguments of the function as given by evaluate. 
- - """ - - outputs = self.forward(*args, **kwargs) - return outputs - - @property - def inertia(self): - """Return inertia of the pendulum.""" - return self.mass * self.length ** 2 - - def normalize(self, state, action): - """Normalize states and actions.""" - if self.normalization is None: - return state, action - - Tx_inv, Tu_inv = map(np.diag, self.inv_norm) - # if isinstance(Tx_inv, np.ndarray): - # Tx_inv = torch.from_numpy(Tx_inv) - # if isinstance(Tu_inv, np.ndarray): - # Tu_inv = torch.from_numpy(Tu_inv) - # state = tf.matmul(state, Tx_inv) - # state = torch.matmul(state, Tx_inv) - state = np.matmul(state, Tx_inv) - - if action is not None: - # action = tf.matmul(action, Tu_inv) - # action = torch.matmul(action, Tu_inv) - action = np.matmul(action, Tu_inv) - - return state, action - - def denormalize(self, state, action): - """De-normalize states and actions.""" - if self.normalization is None: - return state, action - - Tx, Tu = map(np.diag, self.normalization) - - # state = tf.matmul(state, Tx) - # convert to torch - # if isinstance(Tx, np.ndarray): - # Tx = torch.from_numpy(Tx) - # if isinstance(Tu, np.ndarray): - # Tu = torch.from_numpy(Tu) - - # state = torch.matmul(state, Tx) - state = np.matmul(state, Tx) - if action is not None: - # action = tf.matmul(action, Tu) - # action = torch.matmul(action, Tu) - action = np.matmul(action, Tu) - - return state, action - - def linearize(self): - """Return the linearized system. - - Returns - ------- - a : ndarray - The state matrix. - b : ndarray - The action matrix. - - """ - gravity = self.gravity - length = self.length - friction = self.friction - inertia = self.inertia - - A = np.array([[0, 1], - [gravity / length, -friction / inertia]], - dtype=config.np_dtype) - - B = np.array([[0], - [1 / inertia]], - dtype=config.np_dtype) - - if self.normalization is not None: - Tx, Tu = map(np.diag, self.normalization) - Tx_inv, Tu_inv = map(np.diag, self.inv_norm) - - A = np.linalg.multi_dot((Tx_inv, A, Tx)) - B = np.linalg.multi_dot((Tx_inv, B, Tu)) - - sys = signal.StateSpace(A, B, np.eye(2), np.zeros((2, 1))) - sysd = sys.to_discrete(self.dt) - return sysd.A, sysd.B - - # @concatenate_inputs(start=1) - def forward(self, state_action): - """Evaluate the dynamics.""" - # Denormalize - # state, action = tf.split(state_action, [2, 1], axis=1) - # state, action = torch.split(state_action, [2, 1], dim=0) - # print('np.split(state_action, [2, 1], axis=0)', np.split(state_action, [2], axis=0)) - state, action = np.split(state_action, [2], axis=0) - state, action = self.denormalize(state, action) - - n_inner = 10 - dt = self.dt / n_inner - for i in range(n_inner): - state_derivative = self.ode(state, action) - state = state + dt * state_derivative - - return self.normalize(state, None)[0] - - def ode(self, state, action): - """Compute the state time-derivative. - - Parameters - ---------- - states: ndarray or Tensor - Unnormalized states. - actions: ndarray or Tensor - Unnormalized actions. 
- - Returns - ------- - x_dot: Tensor - The normalized derivative of the dynamics - - """ - # Physical dynamics - gravity = self.gravity - length = self.length - friction = self.friction - inertia = self.inertia - - # angle, angular_velocity = tf.split(state, 2, axis=1) - # print('state', state) - # print('split result', torch.split(state, 1, dim=0)) - # print('np.split(state, [1], axis=0)', np.split(state, [1], axis=-1)) - # angle, angular_velocity = torch.split(state, 1, dim=-1) - angle, angular_velocity = np.split(state, [1], axis=-1) - - # x_ddot = gravity / length * tf.sin(angle) + action / inertia - # x_ddot = gravity / length * torch.sin(angle) + action / inertia - x_ddot = gravity / length * np.sin(angle) + action / inertia - - if friction > 0: - x_ddot -= friction / inertia * angular_velocity - - # state_derivative = tf.concat((angular_velocity, x_ddot), axis=1) - # state_derivative = torch.cat((angular_velocity, x_ddot), dim=-1) - state_derivative = np.concatenate((angular_velocity, x_ddot), axis=-1) - - # Normalize - return state_derivative - - def _setup_symbolic(self, prior_prop={}, **kwargs): - """Setup the casadi symbolic dynamics.""" - length = self.length - gravity = self.gravity - mass = self.mass - friction = self.friction - inertia = self.inertia # mass * length ** 2 - dt = self.dt - # Input variables. - theta = cs.MX.sym('theta') - theta_dot = cs.MX.sym('theta_dot') - X = cs.vertcat(theta, theta_dot) - U = cs.MX.sym('u') - nx = 2 - nu = 1 - # Dynamics. - theta_ddot = gravity / length * cs.sin(theta) + U / inertia - if friction > 0: - theta_ddot -= friction / inertia * theta_dot - X_dot = cs.vertcat(theta_dot, theta_ddot) - # Observation. - Y = cs.vertcat(theta, theta_dot) - # Define cost (quandratic form). - Q = cs.MX.sym('Q', nx, nx) - R = cs.MX.sym('R', nu, nu) - Xr = cs.MX.sym('Xr', nx, 1) - Ur = cs.MX.sym('Ur', nu, 1) - cost_func = 0.5 * (X - Xr).T @ Q @ (X - Xr) + 0.5 * (U - Ur).T @ R @ (U - Ur) - # Define dynamics and cost dictionaries. - dynamics = {'dyn_eqn': X_dot, 'obs_eqn': Y, 'vars': {'X': X, 'U': U}} - cost = {'cost_func': cost_func, 'vars': {'X': X, 'U': U, 'Xr': Xr, 'Ur': Ur, 'Q': Q, 'R': R}} - params = { - # prior inertial properties - 'pole_length': length, - 'pole_mass': mass, - # equilibrium point for linearization - 'X_EQ': np.zeros(self.nx), - 'U_EQ': np.atleast_2d(Ur)[0, :], - } - # Setup symbolic model. 
- self.symbolic = SymbolicModel(dynamics=dynamics, cost=cost, dt=dt, params=params) - -def compute_roa_pendulum(grid, closed_loop_dynamics, horizon=100, tol=1e-3, equilibrium=None, no_traj=True): - """Compute the largest ROA as a set of states in a discretization.""" - if isinstance(grid, np.ndarray): - all_points = grid - nindex = grid.shape[0] - ndim = grid.shape[1] - else: # grid is a GridWorld instance - all_points = grid.all_points - nindex = grid.nindex - ndim = grid.ndim - - # Forward-simulate all trajectories from initial points in the discretization - if no_traj: - end_states = all_points - for t in range(1, horizon): - end_states = closed_loop_dynamics(end_states) - else: - trajectories = np.empty((nindex, ndim, horizon)) - trajectories[:, :, 0] = all_points - for t in range(1, horizon): - # print('trajectories[:, :, t - 1]', trajectories[1, :, t - 1]) - # print('trajectories[:, :, t - 1].shape', trajectories[1, :, t - 1].shape) - # simulate all states in the grid - for state_index in range(nindex): - trajectories[state_index, :, t] = closed_loop_dynamics(trajectories[state_index, :, t - 1]) - - end_states = trajectories[:, :, -1] - - if equilibrium is None: - equilibrium = np.zeros((1, ndim)) - - # Compute an approximate ROA as all states that end up "close" to 0 - dists = np.linalg.norm(end_states - equilibrium, ord=2, axis=1, keepdims=True).ravel() - roa = (dists <= tol) - if no_traj: - return roa - else: - return roa, trajectories \ No newline at end of file diff --git a/tests/test_hpo/test_train.py b/tests/test_hpo/test_train.py index 8e2bcc28c..ef83011f9 100644 --- a/tests/test_hpo/test_train.py +++ b/tests/test_hpo/test_train.py @@ -91,6 +91,7 @@ def test_train_cartpole(SYS, TASK, ALGO, PRIOR, HYPERPARAMETER): # drop database drop(munch.Munch({'tag': f'{ALGO}_hpo'})) + @pytest.mark.parametrize('SYS', ['quadrotor_2D', 'quadrotor_2D_attitude']) @pytest.mark.parametrize('TASK', ['track']) @pytest.mark.parametrize('ALGO', ['ppo', 'sac', 'gp_mpc']) @@ -118,7 +119,7 @@ def test_train_quad(SYS, TASK, ALGO, PRIOR, HYPERPARAMETER): raise ValueError('optimimum hyperparameters are not available for quadrotor') else: raise ValueError('HYPERPARAMETER must be either default or optimimum') - + if ALGO == 'gp_mpc': PRIOR = '150' sys.argv[1:] = ['--algo', ALGO, @@ -142,7 +143,7 @@ def test_train_quad(SYS, TASK, ALGO, PRIOR, HYPERPARAMETER): '--opt_hps', opt_hp_path, '--seed', '6', '--use_gpu', 'True' - ] + ] fac = ConfigFactory() fac.add_argument('--opt_hps', type=str, default='', help='yaml file as a result of HPO.')
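For readers skimming the removed safe_control_gym/lyapunov/utilities.py, the core idea behind compute_roa_pendulum is a brute-force region-of-attraction estimate: roll every grid point forward under the closed-loop dynamics for a fixed horizon and keep the points that end up close to the equilibrium. A minimal standalone sketch of that pattern (not part of the diff; the linear closed loop, grid, and tolerance below are illustrative, not values from the repository):

import numpy as np

def compute_roa(all_points, closed_loop_dynamics, horizon=100, tol=1e-3, equilibrium=None):
    """Mark the grid points whose closed-loop rollout ends near the equilibrium."""
    states = np.asarray(all_points, dtype=float)        # (n_points, n_dims)
    for _ in range(horizon - 1):
        states = closed_loop_dynamics(states)           # batched one-step map
    if equilibrium is None:
        equilibrium = np.zeros((1, states.shape[1]))
    dists = np.linalg.norm(states - equilibrium, ord=2, axis=1)
    return dists <= tol                                 # boolean ROA mask over the grid

# Illustrative usage with a stable linear closed loop x_next = 0.9 * x.
grid = np.stack(np.meshgrid(np.linspace(-1, 1, 11), np.linspace(-1, 1, 11)), -1).reshape(-1, 2)
print(compute_roa(grid, lambda x: 0.9 * x).mean())      # fraction of grid points inside the ROA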
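The removed dlqr and discretize_linear_system helpers compose in the usual way: discretize the continuous-time linearization (Euler or exact), then solve the discrete algebraic Riccati equation for the gain in u = -K x. A minimal sketch under assumed pendulum parameters (the mass, length, time step, and cost weights here are placeholders for illustration, not values taken from the deleted code):

import numpy as np
import scipy.linalg

# Continuous-time linearization about the upright equilibrium, mirroring the
# removed InvertedPendulum.linearize() without normalization (illustrative values).
mass, length, friction, dt = 0.25, 0.5, 0.0, 1.0 / 80.0
gravity = 9.81
inertia = mass * length ** 2
A = np.array([[0.0, 1.0], [gravity / length, -friction / inertia]])
B = np.array([[0.0], [1.0 / inertia]])

# Euler discretization (the exact=False branch of the removed helper).
Ad, Bd = np.eye(2) + A * dt, B * dt

# Discrete-time LQR gain for u = -K x, as in the removed dlqr().
Q, R = np.eye(2), 0.1 * np.eye(1)
P = scipy.linalg.solve_discrete_are(Ad, Bd, Q, R)
K = np.linalg.solve(Bd.T @ P @ Bd + R, Bd.T @ P @ Ad)
print(K)  # 1x2 gain matrix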
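GridWorld_pendulum's index_to_state / state_to_index pair is standard regular-grid bookkeeping built on np.unravel_index and np.ravel_multi_index. A small self-contained sketch of that round trip (the limits and point counts are made up for illustration):

import numpy as np

limits = np.array([[-1.0, 1.0], [-2.0, 2.0]])   # illustrative state limits per dimension
num_points = np.array([5, 9])                   # grid points per dimension
offset = limits[:, 0]
unit_maxes = (limits[:, 1] - offset) / (num_points - 1)

def index_to_state(indices):
    # Flat index -> per-dimension grid index -> physical state.
    ijk = np.vstack(np.unravel_index(np.atleast_1d(indices), num_points)).T
    return ijk * unit_maxes + offset

def state_to_index(states):
    # Physical state -> nearest per-dimension grid index -> flat index.
    states = np.clip(np.atleast_2d(states), limits[:, 0], limits[:, 1])
    ijk = np.rint((states - offset) / unit_maxes).astype(int)
    return np.ravel_multi_index(ijk.T, num_points)

idx = np.arange(num_points.prod())
assert np.array_equal(state_to_index(index_to_state(idx)), idx)  # exact round trip on grid points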